From webhook-mailer at python.org Mon Jun 1 02:58:21 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Mon, 01 Jun 2020 06:58:21 -0000 Subject: [Python-checkins] bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Message-ID: https://github.com/python/cpython/commit/a871f692b4a2e6c7d45579693e787edc0af1a02c commit: a871f692b4a2e6c7d45579693e787edc0af1a02c branch: master author: Christian Heimes committer: GitHub date: 2020-06-01T08:58:14+02:00 summary: bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Fix :mod:`ssl`` code to be compatible with OpenSSL 1.1.x builds that use ``no-deprecated`` and ``--api=1.1.0``. Note: Tests assume full OpenSSL API and fail with limited API. Signed-off-by: Christian Heimes Co-authored-by: Mark Wright files: A Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst new file mode 100644 index 0000000000000..c4cfa56ce02c5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst @@ -0,0 +1,2 @@ +Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds that use +``no-deprecated`` and ``--api=1.1.0``. diff --git a/Modules/_ssl.c b/Modules/_ssl.c index 5fe65a8a1d6df..5e82fe41a76ec 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -142,6 +142,24 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif +/* OpenSSL API compat */ +#ifdef OPENSSL_API_COMPAT +#if OPENSSL_API_COMPAT >= 0x10100000L + +/* OpenSSL API 1.1.0+ does not include version methods */ +#ifndef OPENSSL_NO_TLS1_METHOD +#define OPENSSL_NO_TLS1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_1_METHOD +#define OPENSSL_NO_TLS1_1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_2_METHOD +#define OPENSSL_NO_TLS1_2_METHOD 1 +#endif + +#endif /* >= 1.1.0 compcat */ +#endif /* OPENSSL_API_COMPAT */ + /* LibreSSL 2.7.0 provides necessary OpenSSL 1.1.0 APIs */ #if defined(LIBRESSL_VERSION_NUMBER) && LIBRESSL_VERSION_NUMBER >= 0x2070000fL # define PY_OPENSSL_1_1_API 1 @@ -201,6 +219,12 @@ static void _PySSLFixErrno(void) { #define TLS_method SSLv23_method #define TLS_client_method SSLv23_client_method #define TLS_server_method SSLv23_server_method +#define ASN1_STRING_get0_data ASN1_STRING_data +#define X509_get0_notBefore X509_get_notBefore +#define X509_get0_notAfter X509_get_notAfter +#define OpenSSL_version_num SSLeay +#define OpenSSL_version SSLeay_version +#define OPENSSL_VERSION SSLEAY_VERSION static int X509_NAME_ENTRY_set(const X509_NAME_ENTRY *ne) { @@ -885,7 +909,7 @@ _ssl_configure_hostname(PySSLSocket *self, const char* server_hostname) goto error; } } else { - if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_data(ip), + if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_get0_data(ip), ASN1_STRING_length(ip))) { _setSSLError(NULL, 0, __FILE__, __LINE__); goto error; @@ -1361,7 +1385,7 @@ _get_peer_alt_names (X509 *certificate) { goto fail; } PyTuple_SET_ITEM(t, 0, v); - v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as), + v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_get0_data(as), ASN1_STRING_length(as)); if (v == NULL) { Py_DECREF(t); @@ -1657,7 +1681,7 @@ _decode_certificate(X509 *certificate) { ASN1_INTEGER *serialNumber; char buf[2048]; int len, result; - ASN1_TIME *notBefore, *notAfter; + const ASN1_TIME *notBefore, *notAfter; PyObject *pnotBefore, *pnotAfter; retval = PyDict_New(); @@ -1719,7 +1743,7 @@ 
_decode_certificate(X509 *certificate) { Py_DECREF(sn_obj); (void) BIO_reset(biobuf); - notBefore = X509_get_notBefore(certificate); + notBefore = X509_get0_notBefore(certificate); ASN1_TIME_print(biobuf, notBefore); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -1736,7 +1760,7 @@ _decode_certificate(X509 *certificate) { Py_DECREF(pnotBefore); (void) BIO_reset(biobuf); - notAfter = X509_get_notAfter(certificate); + notAfter = X509_get0_notAfter(certificate); ASN1_TIME_print(biobuf, notAfter); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -3079,17 +3103,23 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) ctx = SSL_CTX_new(SSLv3_method()); break; #endif -#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) +#if (defined(TLS1_VERSION) && \ + !defined(OPENSSL_NO_TLS1) && \ + !defined(OPENSSL_NO_TLS1_METHOD)) case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); break; #endif -#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) +#if (defined(TLS1_1_VERSION) && \ + !defined(OPENSSL_NO_TLS1_1) && \ + !defined(OPENSSL_NO_TLS1_1_METHOD)) case PY_SSL_VERSION_TLS1_1: ctx = SSL_CTX_new(TLSv1_1_method()); break; #endif -#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) +#if (defined(TLS1_2_VERSION) && \ + !defined(OPENSSL_NO_TLS1_2) && \ + !defined(OPENSSL_NO_TLS1_2_METHOD)) case PY_SSL_VERSION_TLS1_2: ctx = SSL_CTX_new(TLSv1_2_method()); break; @@ -3207,7 +3237,7 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) conservative and assume it wasn't fixed until release. We do this check at runtime to avoid problems from the dynamic linker. See #25672 for more on this. */ - libver = SSLeay(); + libver = OpenSSL_version_num(); if (!(libver >= 0x10001000UL && libver < 0x1000108fUL) && !(libver >= 0x10000000UL && libver < 0x100000dfUL)) { SSL_CTX_set_mode(self->ctx, SSL_MODE_RELEASE_BUFFERS); @@ -5286,7 +5316,11 @@ PySSL_RAND(int len, int pseudo) if (bytes == NULL) return NULL; if (pseudo) { +#ifdef PY_OPENSSL_1_1_API + ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#else ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#endif if (ok == 0 || ok == 1) return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False); } @@ -6373,7 +6407,7 @@ PyInit__ssl(void) /* SSLeay() gives us the version of the library linked against, which could be different from the headers version. 
*/ - libver = SSLeay(); + libver = OpenSSL_version_num(); r = PyLong_FromUnsignedLong(libver); if (r == NULL) return NULL; @@ -6383,7 +6417,7 @@ PyInit__ssl(void) r = Py_BuildValue("IIIII", major, minor, fix, patch, status); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION_INFO", r)) return NULL; - r = PyUnicode_FromString(SSLeay_version(SSLEAY_VERSION)); + r = PyUnicode_FromString(OpenSSL_version(OPENSSL_VERSION)); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r)) return NULL; diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 12af98d12c45d..3818165a836fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -314,6 +314,7 @@ def _build_src(self): "shared", "--debug", "--prefix={}".format(self.install_dir) ] + # cmd.extend(["no-deprecated", "--api=1.1.0"]) env = os.environ.copy() # set rpath env["LD_RUN_PATH"] = self.lib_dir From webhook-mailer at python.org Mon Jun 1 03:12:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 07:12:00 -0000 Subject: [Python-checkins] bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Message-ID: https://github.com/python/cpython/commit/296db8cc2fd089d0d2f23b7dddafc029be9f1eb6 commit: 296db8cc2fd089d0d2f23b7dddafc029be9f1eb6 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T00:11:51-07:00 summary: bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Fix :mod:`ssl`` code to be compatible with OpenSSL 1.1.x builds that use ``no-deprecated`` and ``--api=1.1.0``. Note: Tests assume full OpenSSL API and fail with limited API. Signed-off-by: Christian Heimes Co-authored-by: Mark Wright (cherry picked from commit a871f692b4a2e6c7d45579693e787edc0af1a02c) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst new file mode 100644 index 0000000000000..c4cfa56ce02c5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst @@ -0,0 +1,2 @@ +Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds that use +``no-deprecated`` and ``--api=1.1.0``. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c index bc412ac139476..93cc529e796a0 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -144,6 +144,24 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif +/* OpenSSL API compat */ +#ifdef OPENSSL_API_COMPAT +#if OPENSSL_API_COMPAT >= 0x10100000L + +/* OpenSSL API 1.1.0+ does not include version methods */ +#ifndef OPENSSL_NO_TLS1_METHOD +#define OPENSSL_NO_TLS1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_1_METHOD +#define OPENSSL_NO_TLS1_1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_2_METHOD +#define OPENSSL_NO_TLS1_2_METHOD 1 +#endif + +#endif /* >= 1.1.0 compcat */ +#endif /* OPENSSL_API_COMPAT */ + /* LibreSSL 2.7.0 provides necessary OpenSSL 1.1.0 APIs */ #if defined(LIBRESSL_VERSION_NUMBER) && LIBRESSL_VERSION_NUMBER >= 0x2070000fL # define PY_OPENSSL_1_1_API 1 @@ -199,6 +217,12 @@ static void _PySSLFixErrno(void) { #define TLS_method SSLv23_method #define TLS_client_method SSLv23_client_method #define TLS_server_method SSLv23_server_method +#define ASN1_STRING_get0_data ASN1_STRING_data +#define X509_get0_notBefore X509_get_notBefore +#define X509_get0_notAfter X509_get_notAfter +#define OpenSSL_version_num SSLeay +#define OpenSSL_version SSLeay_version +#define OPENSSL_VERSION SSLEAY_VERSION static int X509_NAME_ENTRY_set(const X509_NAME_ENTRY *ne) { @@ -857,7 +881,7 @@ _ssl_configure_hostname(PySSLSocket *self, const char* server_hostname) goto error; } } else { - if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_data(ip), + if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_get0_data(ip), ASN1_STRING_length(ip))) { _setSSLError(NULL, 0, __FILE__, __LINE__); goto error; @@ -1330,7 +1354,7 @@ _get_peer_alt_names (X509 *certificate) { goto fail; } PyTuple_SET_ITEM(t, 0, v); - v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as), + v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_get0_data(as), ASN1_STRING_length(as)); if (v == NULL) { Py_DECREF(t); @@ -1626,7 +1650,7 @@ _decode_certificate(X509 *certificate) { ASN1_INTEGER *serialNumber; char buf[2048]; int len, result; - ASN1_TIME *notBefore, *notAfter; + const ASN1_TIME *notBefore, *notAfter; PyObject *pnotBefore, *pnotAfter; retval = PyDict_New(); @@ -1688,7 +1712,7 @@ _decode_certificate(X509 *certificate) { Py_DECREF(sn_obj); (void) BIO_reset(biobuf); - notBefore = X509_get_notBefore(certificate); + notBefore = X509_get0_notBefore(certificate); ASN1_TIME_print(biobuf, notBefore); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -1705,7 +1729,7 @@ _decode_certificate(X509 *certificate) { Py_DECREF(pnotBefore); (void) BIO_reset(biobuf); - notAfter = X509_get_notAfter(certificate); + notAfter = X509_get0_notAfter(certificate); ASN1_TIME_print(biobuf, notAfter); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -3023,17 +3047,23 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) ctx = SSL_CTX_new(SSLv3_method()); break; #endif -#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) +#if (defined(TLS1_VERSION) && \ + !defined(OPENSSL_NO_TLS1) && \ + !defined(OPENSSL_NO_TLS1_METHOD)) case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); break; #endif -#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) +#if (defined(TLS1_1_VERSION) && \ + !defined(OPENSSL_NO_TLS1_1) && \ + !defined(OPENSSL_NO_TLS1_1_METHOD)) case PY_SSL_VERSION_TLS1_1: ctx = SSL_CTX_new(TLSv1_1_method()); break; #endif -#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) +#if (defined(TLS1_2_VERSION) && \ + !defined(OPENSSL_NO_TLS1_2) && 
\ + !defined(OPENSSL_NO_TLS1_2_METHOD)) case PY_SSL_VERSION_TLS1_2: ctx = SSL_CTX_new(TLSv1_2_method()); break; @@ -3146,7 +3176,7 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) conservative and assume it wasn't fixed until release. We do this check at runtime to avoid problems from the dynamic linker. See #25672 for more on this. */ - libver = SSLeay(); + libver = OpenSSL_version_num(); if (!(libver >= 0x10001000UL && libver < 0x1000108fUL) && !(libver >= 0x10000000UL && libver < 0x100000dfUL)) { SSL_CTX_set_mode(self->ctx, SSL_MODE_RELEASE_BUFFERS); @@ -5156,7 +5186,11 @@ PySSL_RAND(int len, int pseudo) if (bytes == NULL) return NULL; if (pseudo) { +#ifdef PY_OPENSSL_1_1_API + ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#else ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#endif if (ok == 0 || ok == 1) return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False); } @@ -6240,7 +6274,7 @@ PyInit__ssl(void) /* SSLeay() gives us the version of the library linked against, which could be different from the headers version. */ - libver = SSLeay(); + libver = OpenSSL_version_num(); r = PyLong_FromUnsignedLong(libver); if (r == NULL) return NULL; @@ -6250,7 +6284,7 @@ PyInit__ssl(void) r = Py_BuildValue("IIIII", major, minor, fix, patch, status); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION_INFO", r)) return NULL; - r = PyUnicode_FromString(SSLeay_version(SSLEAY_VERSION)); + r = PyUnicode_FromString(OpenSSL_version(OPENSSL_VERSION)); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r)) return NULL; diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 12af98d12c45d..3818165a836fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -314,6 +314,7 @@ def _build_src(self): "shared", "--debug", "--prefix={}".format(self.install_dir) ] + # cmd.extend(["no-deprecated", "--api=1.1.0"]) env = os.environ.copy() # set rpath env["LD_RUN_PATH"] = self.lib_dir From webhook-mailer at python.org Mon Jun 1 03:17:23 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 07:17:23 -0000 Subject: [Python-checkins] bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Message-ID: https://github.com/python/cpython/commit/9c0ff178a5d5d0992c0be21a7f343a495338ad73 commit: 9c0ff178a5d5d0992c0be21a7f343a495338ad73 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T00:17:16-07:00 summary: bpo-30008: Fix OpenSSL no-deprecated compilation (GH-20397) Fix :mod:`ssl`` code to be compatible with OpenSSL 1.1.x builds that use ``no-deprecated`` and ``--api=1.1.0``. Note: Tests assume full OpenSSL API and fail with limited API. Signed-off-by: Christian Heimes Co-authored-by: Mark Wright (cherry picked from commit a871f692b4a2e6c7d45579693e787edc0af1a02c) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst M Modules/_ssl.c M Tools/ssl/multissltests.py diff --git a/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst new file mode 100644 index 0000000000000..c4cfa56ce02c5 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst @@ -0,0 +1,2 @@ +Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds that use +``no-deprecated`` and ``--api=1.1.0``. 
diff --git a/Modules/_ssl.c b/Modules/_ssl.c index b0e3c0432f51d..9fbaecb80739f 100644 --- a/Modules/_ssl.c +++ b/Modules/_ssl.c @@ -144,6 +144,24 @@ static void _PySSLFixErrno(void) { # define PY_OPENSSL_1_1_API 1 #endif +/* OpenSSL API compat */ +#ifdef OPENSSL_API_COMPAT +#if OPENSSL_API_COMPAT >= 0x10100000L + +/* OpenSSL API 1.1.0+ does not include version methods */ +#ifndef OPENSSL_NO_TLS1_METHOD +#define OPENSSL_NO_TLS1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_1_METHOD +#define OPENSSL_NO_TLS1_1_METHOD 1 +#endif +#ifndef OPENSSL_NO_TLS1_2_METHOD +#define OPENSSL_NO_TLS1_2_METHOD 1 +#endif + +#endif /* >= 1.1.0 compcat */ +#endif /* OPENSSL_API_COMPAT */ + /* LibreSSL 2.7.0 provides necessary OpenSSL 1.1.0 APIs */ #if defined(LIBRESSL_VERSION_NUMBER) && LIBRESSL_VERSION_NUMBER >= 0x2070000fL # define PY_OPENSSL_1_1_API 1 @@ -203,6 +221,12 @@ static void _PySSLFixErrno(void) { #define TLS_method SSLv23_method #define TLS_client_method SSLv23_client_method #define TLS_server_method SSLv23_server_method +#define ASN1_STRING_get0_data ASN1_STRING_data +#define X509_get0_notBefore X509_get_notBefore +#define X509_get0_notAfter X509_get_notAfter +#define OpenSSL_version_num SSLeay +#define OpenSSL_version SSLeay_version +#define OPENSSL_VERSION SSLEAY_VERSION static int X509_NAME_ENTRY_set(const X509_NAME_ENTRY *ne) { @@ -887,7 +911,7 @@ _ssl_configure_hostname(PySSLSocket *self, const char* server_hostname) goto error; } } else { - if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_data(ip), + if (!X509_VERIFY_PARAM_set1_ip(param, ASN1_STRING_get0_data(ip), ASN1_STRING_length(ip))) { _setSSLError(NULL, 0, __FILE__, __LINE__); goto error; @@ -1363,7 +1387,7 @@ _get_peer_alt_names (X509 *certificate) { goto fail; } PyTuple_SET_ITEM(t, 0, v); - v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_data(as), + v = PyUnicode_FromStringAndSize((char *)ASN1_STRING_get0_data(as), ASN1_STRING_length(as)); if (v == NULL) { Py_DECREF(t); @@ -1659,7 +1683,7 @@ _decode_certificate(X509 *certificate) { ASN1_INTEGER *serialNumber; char buf[2048]; int len, result; - ASN1_TIME *notBefore, *notAfter; + const ASN1_TIME *notBefore, *notAfter; PyObject *pnotBefore, *pnotAfter; retval = PyDict_New(); @@ -1721,7 +1745,7 @@ _decode_certificate(X509 *certificate) { Py_DECREF(sn_obj); (void) BIO_reset(biobuf); - notBefore = X509_get_notBefore(certificate); + notBefore = X509_get0_notBefore(certificate); ASN1_TIME_print(biobuf, notBefore); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -1738,7 +1762,7 @@ _decode_certificate(X509 *certificate) { Py_DECREF(pnotBefore); (void) BIO_reset(biobuf); - notAfter = X509_get_notAfter(certificate); + notAfter = X509_get0_notAfter(certificate); ASN1_TIME_print(biobuf, notAfter); len = BIO_gets(biobuf, buf, sizeof(buf)-1); if (len < 0) { @@ -3081,17 +3105,23 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) ctx = SSL_CTX_new(SSLv3_method()); break; #endif -#if defined(TLS1_VERSION) && !defined(OPENSSL_NO_TLS1) +#if (defined(TLS1_VERSION) && \ + !defined(OPENSSL_NO_TLS1) && \ + !defined(OPENSSL_NO_TLS1_METHOD)) case PY_SSL_VERSION_TLS1: ctx = SSL_CTX_new(TLSv1_method()); break; #endif -#if defined(TLS1_1_VERSION) && !defined(OPENSSL_NO_TLS1_1) +#if (defined(TLS1_1_VERSION) && \ + !defined(OPENSSL_NO_TLS1_1) && \ + !defined(OPENSSL_NO_TLS1_1_METHOD)) case PY_SSL_VERSION_TLS1_1: ctx = SSL_CTX_new(TLSv1_1_method()); break; #endif -#if defined(TLS1_2_VERSION) && !defined(OPENSSL_NO_TLS1_2) +#if (defined(TLS1_2_VERSION) && \ + !defined(OPENSSL_NO_TLS1_2) && 
\ + !defined(OPENSSL_NO_TLS1_2_METHOD)) case PY_SSL_VERSION_TLS1_2: ctx = SSL_CTX_new(TLSv1_2_method()); break; @@ -3209,7 +3239,7 @@ _ssl__SSLContext_impl(PyTypeObject *type, int proto_version) conservative and assume it wasn't fixed until release. We do this check at runtime to avoid problems from the dynamic linker. See #25672 for more on this. */ - libver = SSLeay(); + libver = OpenSSL_version_num(); if (!(libver >= 0x10001000UL && libver < 0x1000108fUL) && !(libver >= 0x10000000UL && libver < 0x100000dfUL)) { SSL_CTX_set_mode(self->ctx, SSL_MODE_RELEASE_BUFFERS); @@ -5289,7 +5319,11 @@ PySSL_RAND(int len, int pseudo) if (bytes == NULL) return NULL; if (pseudo) { +#ifdef PY_OPENSSL_1_1_API + ok = RAND_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#else ok = RAND_pseudo_bytes((unsigned char*)PyBytes_AS_STRING(bytes), len); +#endif if (ok == 0 || ok == 1) return Py_BuildValue("NO", bytes, ok == 1 ? Py_True : Py_False); } @@ -6376,7 +6410,7 @@ PyInit__ssl(void) /* SSLeay() gives us the version of the library linked against, which could be different from the headers version. */ - libver = SSLeay(); + libver = OpenSSL_version_num(); r = PyLong_FromUnsignedLong(libver); if (r == NULL) return NULL; @@ -6386,7 +6420,7 @@ PyInit__ssl(void) r = Py_BuildValue("IIIII", major, minor, fix, patch, status); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION_INFO", r)) return NULL; - r = PyUnicode_FromString(SSLeay_version(SSLEAY_VERSION)); + r = PyUnicode_FromString(OpenSSL_version(OPENSSL_VERSION)); if (r == NULL || PyModule_AddObject(m, "OPENSSL_VERSION", r)) return NULL; diff --git a/Tools/ssl/multissltests.py b/Tools/ssl/multissltests.py index 12af98d12c45d..3818165a836fb 100755 --- a/Tools/ssl/multissltests.py +++ b/Tools/ssl/multissltests.py @@ -314,6 +314,7 @@ def _build_src(self): "shared", "--debug", "--prefix={}".format(self.install_dir) ] + # cmd.extend(["no-deprecated", "--api=1.1.0"]) env = os.environ.copy() # set rpath env["LD_RUN_PATH"] = self.lib_dir From webhook-mailer at python.org Mon Jun 1 05:42:59 2020 From: webhook-mailer at python.org (Mark Shannon) Date: Mon, 01 Jun 2020 09:42:59 -0000 Subject: [Python-checkins] Make sure that keyword arguments are merged into the arguments dictionary when dict unpacking and keyword arguments are interleaved. (GH-20553) Message-ID: https://github.com/python/cpython/commit/db64f12e4deda2abbafb6d2bd5c06762fca991ff commit: db64f12e4deda2abbafb6d2bd5c06762fca991ff branch: master author: Mark Shannon committer: GitHub date: 2020-06-01T10:42:42+01:00 summary: Make sure that keyword arguments are merged into the arguments dictionary when dict unpacking and keyword arguments are interleaved. (GH-20553) files: M Lib/test/test_extcall.py M Python/compile.c diff --git a/Lib/test/test_extcall.py b/Lib/test/test_extcall.py index 1faf29e01d3ca..4205ca82222f2 100644 --- a/Lib/test/test_extcall.py +++ b/Lib/test/test_extcall.py @@ -79,6 +79,24 @@ >>> f(1, 2, 3, *(4, 5), x=6, y=7, **UserDict(a=8, b=9)) (1, 2, 3, 4, 5) {'a': 8, 'b': 9, 'x': 6, 'y': 7} +Mix keyword arguments and dict unpacking + + >>> d1 = {'a':1} + + >>> d2 = {'c':3} + + >>> f(b=2, **d1, **d2) + () {'a': 1, 'b': 2, 'c': 3} + + >>> f(**d1, b=2, **d2) + () {'a': 1, 'b': 2, 'c': 3} + + >>> f(**d1, **d2, b=2) + () {'a': 1, 'b': 2, 'c': 3} + + >>> f(**d1, b=2, **d2, d=4) + () {'a': 1, 'b': 2, 'c': 3, 'd': 4} + Examples with invalid arguments (TypeErrors). We're also testing the function names in the exception messages. 
diff --git a/Python/compile.c b/Python/compile.c index 4a587c00fd402..fccc688affca6 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -4321,6 +4321,9 @@ compiler_call_helper(struct compiler *c, if (!compiler_subkwargs(c, keywords, i - nseen, i)) { return 0; } + if (have_dict) { + ADDOP_I(c, DICT_MERGE, 1); + } have_dict = 1; nseen = 0; } From webhook-mailer at python.org Mon Jun 1 10:02:48 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 01 Jun 2020 14:02:48 -0000 Subject: [Python-checkins] bpo-40826: Add _Py_EnsureTstateNotNULL() macro (GH-20571) Message-ID: https://github.com/python/cpython/commit/3026cad59b87751a9215111776cac8e819458fce commit: 3026cad59b87751a9215111776cac8e819458fce branch: master author: Victor Stinner committer: GitHub date: 2020-06-01T16:02:40+02:00 summary: bpo-40826: Add _Py_EnsureTstateNotNULL() macro (GH-20571) Add _Py_EnsureTstateNotNULL(tstate) macro: call Py_FatalError() if tstate is NULL, the error message contains the current function name. files: M Include/internal/pycore_pystate.h M Lib/test/test_capi.py M Python/ceval.c M Python/errors.c M Python/pystate.c M Python/sysmodule.c diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index d96ba31207001..7ac4ad5869b4c 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -86,6 +86,21 @@ _PyThreadState_GET(void) #undef PyThreadState_GET #define PyThreadState_GET() _PyThreadState_GET() +PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalError_TstateNULL(const char *func); + +static inline void +_Py_EnsureFuncTstateNotNULL(const char *func, PyThreadState *tstate) +{ + if (tstate == NULL) { + _Py_FatalError_TstateNULL(func); + } +} + +// Call Py_FatalError() if tstate is NULL +#define _Py_EnsureTstateNotNULL(tstate) \ + _Py_EnsureFuncTstateNotNULL(__func__, tstate) + + /* Get the current interpreter state. The macro is unsafe: it does not check for error and it can return NULL. @@ -96,7 +111,9 @@ _PyThreadState_GET(void) and _PyGILState_GetInterpreterStateUnsafe(). 
*/ static inline PyInterpreterState* _PyInterpreterState_GET(void) { PyThreadState *tstate = _PyThreadState_GET(); - assert(tstate != NULL); +#ifdef Py_DEBUG + _Py_EnsureTstateNotNULL(tstate); +#endif return tstate->interp; } diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 44693b8fdd717..5b8b9f6a86f4b 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -68,7 +68,10 @@ def test_no_FatalError_infinite_loop(self): self.assertTrue(err.rstrip().startswith( b'Fatal Python error: ' b'PyThreadState_Get: ' - b'current thread state is NULL (released GIL?)')) + b'the function must be called with the GIL held, ' + b'but the GIL is released ' + b'(the current Python thread state is NULL)'), + err) def test_memoryview_from_NULL_pointer(self): self.assertRaises(ValueError, _testcapi.make_memoryview_from_NULL_pointer) diff --git a/Python/ceval.c b/Python/ceval.c index a79773f85118a..01dd361e5035f 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -240,16 +240,15 @@ UNSIGNAL_ASYNC_EXC(PyInterpreterState *interp) #endif #include "ceval_gil.h" -static void -ensure_tstate_not_null(const char *func, PyThreadState *tstate) +void _Py_NO_RETURN +_Py_FatalError_TstateNULL(const char *func) { - if (tstate == NULL) { - _Py_FatalErrorFunc(func, - "current thread state is NULL (released GIL?)"); - } + _Py_FatalErrorFunc(func, + "the function must be called with the GIL held, " + "but the GIL is released " + "(the current Python thread state is NULL)"); } - #ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS int _PyEval_ThreadsInitialized(PyInterpreterState *interp) @@ -374,7 +373,7 @@ PyEval_AcquireLock(void) { _PyRuntimeState *runtime = &_PyRuntime; PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); take_gil(tstate); } @@ -403,7 +402,7 @@ _PyEval_ReleaseLock(PyThreadState *tstate) void PyEval_AcquireThread(PyThreadState *tstate) { - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); take_gil(tstate); @@ -442,7 +441,7 @@ void _PyEval_ReInitThreads(_PyRuntimeState *runtime) { PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); #ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; @@ -486,7 +485,7 @@ PyEval_SaveThread(void) #else PyThreadState *tstate = _PyThreadState_Swap(&runtime->gilstate, NULL); #endif - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); struct _ceval_runtime_state *ceval = &runtime->ceval; struct _ceval_state *ceval2 = &tstate->interp->ceval; @@ -502,7 +501,7 @@ PyEval_SaveThread(void) void PyEval_RestoreThread(PyThreadState *tstate) { - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); take_gil(tstate); @@ -944,7 +943,7 @@ eval_frame_handle_pending(PyThreadState *tstate) PyObject* _Py_HOT_FUNCTION _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) { - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); #ifdef DXPAIRS int lastopcode = 0; diff --git a/Python/errors.c b/Python/errors.c index 70365aaca585b..5d1725679c4bd 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -1426,7 +1426,7 @@ void _PyErr_WriteUnraisableMsg(const char *err_msg_str, PyObject *obj) { PyThreadState *tstate = _PyThreadState_GET(); - assert(tstate != NULL); + _Py_EnsureTstateNotNULL(tstate); PyObject *err_msg = NULL; PyObject 
*exc_type, *exc_value, *exc_tb; diff --git a/Python/pystate.c b/Python/pystate.c index 119fe31a84ba1..f92c55e747169 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -39,16 +39,6 @@ extern "C" { _Py_atomic_store_relaxed(&(gilstate)->tstate_current, \ (uintptr_t)(value)) -static void -ensure_tstate_not_null(const char *func, PyThreadState *tstate) -{ - if (tstate == NULL) { - _Py_FatalErrorFunc(func, - "current thread state is NULL (released GIL?)"); - } -} - - /* Forward declarations */ static PyThreadState *_PyGILState_GetThisThreadState(struct _gilstate_runtime_state *gilstate); static void _PyThreadState_Delete(PyThreadState *tstate, int check_current); @@ -431,7 +421,7 @@ PyInterpreterState * PyInterpreterState_Get(void) { PyThreadState *tstate = _PyThreadState_GET(); - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); PyInterpreterState *interp = tstate->interp; if (interp == NULL) { Py_FatalError("no current interpreter"); @@ -846,7 +836,7 @@ static void tstate_delete_common(PyThreadState *tstate, struct _gilstate_runtime_state *gilstate) { - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); PyInterpreterState *interp = tstate->interp; if (interp == NULL) { Py_FatalError("NULL interpreter"); @@ -897,7 +887,7 @@ PyThreadState_Delete(PyThreadState *tstate) void _PyThreadState_DeleteCurrent(PyThreadState *tstate) { - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); struct _gilstate_runtime_state *gilstate = &tstate->interp->runtime->gilstate; tstate_delete_common(tstate, gilstate); _PyRuntimeGILState_SetThreadState(gilstate, NULL); @@ -975,7 +965,7 @@ PyThreadState * PyThreadState_Get(void) { PyThreadState *tstate = _PyThreadState_GET(); - ensure_tstate_not_null(__func__, tstate); + _Py_EnsureTstateNotNULL(tstate); return tstate; } diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 914beb7e127fe..e3fe1436145b4 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -457,7 +457,7 @@ static PyObject * sys_audit(PyObject *self, PyObject *const *args, Py_ssize_t argc) { PyThreadState *tstate = _PyThreadState_GET(); - assert(tstate != NULL); + _Py_EnsureTstateNotNULL(tstate); if (argc == 0) { _PyErr_SetString(tstate, PyExc_TypeError, From webhook-mailer at python.org Mon Jun 1 11:44:08 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Mon, 01 Jun 2020 15:44:08 -0000 Subject: [Python-checkins] bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) Message-ID: https://github.com/python/cpython/commit/c8966667bbdb284c3780ef6cec8a3870935a6bb7 commit: c8966667bbdb284c3780ef6cec8a3870935a6bb7 branch: master author: Zackery Spytz committer: GitHub date: 2020-06-01T16:43:56+01:00 summary: bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) files: M Doc/using/windows.rst diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 97e9cdfeb0939..b95a43c853c28 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -108,9 +108,7 @@ approximately 32,000 characters. Your administrator will need to activate the to ``1``. This allows the :func:`open` function, the :mod:`os` module and most other -path functionality to accept and return paths longer than 260 characters when -using strings. (Use of bytes as paths is deprecated on Windows, and this feature -is not available when using bytes.) +path functionality to accept and return paths longer than 260 characters. After changing the above option, no further configuration is required. 
From webhook-mailer at python.org Mon Jun 1 11:53:37 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 15:53:37 -0000 Subject: [Python-checkins] bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) Message-ID: https://github.com/python/cpython/commit/d0dc369a90e356bf2eba651816feb7ad736ce28a commit: d0dc369a90e356bf2eba651816feb7ad736ce28a branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T08:53:33-07:00 summary: bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) (cherry picked from commit c8966667bbdb284c3780ef6cec8a3870935a6bb7) Co-authored-by: Zackery Spytz files: M Doc/using/windows.rst diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index 636f48dfb012b..5114a26a57d07 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -108,9 +108,7 @@ approximately 32,000 characters. Your administrator will need to activate the to ``1``. This allows the :func:`open` function, the :mod:`os` module and most other -path functionality to accept and return paths longer than 260 characters when -using strings. (Use of bytes as paths is deprecated on Windows, and this feature -is not available when using bytes.) +path functionality to accept and return paths longer than 260 characters. After changing the above option, no further configuration is required. From webhook-mailer at python.org Mon Jun 1 11:53:43 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 15:53:43 -0000 Subject: [Python-checkins] bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) Message-ID: https://github.com/python/cpython/commit/d7f2fd2ae54161362c7f3d28bd7a1840a796e63d commit: d7f2fd2ae54161362c7f3d28bd7a1840a796e63d branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T08:53:39-07:00 summary: bpo-40831: Remove an incorrect statement in the Windows docs (GH-20570) (cherry picked from commit c8966667bbdb284c3780ef6cec8a3870935a6bb7) Co-authored-by: Zackery Spytz files: M Doc/using/windows.rst diff --git a/Doc/using/windows.rst b/Doc/using/windows.rst index f5dddb5a37af8..819d3e8307498 100644 --- a/Doc/using/windows.rst +++ b/Doc/using/windows.rst @@ -108,9 +108,7 @@ approximately 32,000 characters. Your administrator will need to activate the to ``1``. This allows the :func:`open` function, the :mod:`os` module and most other -path functionality to accept and return paths longer than 260 characters when -using strings. (Use of bytes as paths is deprecated on Windows, and this feature -is not available when using bytes.) +path functionality to accept and return paths longer than 260 characters. After changing the above option, no further configuration is required. 
From webhook-mailer at python.org Mon Jun 1 12:12:32 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 01 Jun 2020 16:12:32 -0000 Subject: [Python-checkins] bpo-1635741: Port fcntl module to multiphase initialization (GH-20540) Message-ID: https://github.com/python/cpython/commit/e9684fac5a158be9806304a676e619857520a4dc commit: e9684fac5a158be9806304a676e619857520a4dc branch: master author: Dong-hee Na committer: GitHub date: 2020-06-02T01:12:24+09:00 summary: bpo-1635741: Port fcntl module to multiphase initialization (GH-20540) files: A Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-23-35.bpo-1635741.0D-laM.rst M Modules/fcntlmodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-23-35.bpo-1635741.0D-laM.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-23-35.bpo-1635741.0D-laM.rst new file mode 100644 index 0000000000000..cd2bcb6e60877 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-23-35.bpo-1635741.0D-laM.rst @@ -0,0 +1 @@ +Port :mod:`fcntl` to multiphase initialization. diff --git a/Modules/fcntlmodule.c b/Modules/fcntlmodule.c index 43f9b22f67207..39baea01ec84e 100644 --- a/Modules/fcntlmodule.c +++ b/Modules/fcntlmodule.c @@ -662,34 +662,31 @@ all_ins(PyObject* m) return 0; } +static int +fcntl_exec(PyObject *module) +{ + if (all_ins(module) < 0) { + return -1; + } + return 0; +} + +static PyModuleDef_Slot fcntl_slots[] = { + {Py_mod_exec, fcntl_exec}, + {0, NULL} +}; static struct PyModuleDef fcntlmodule = { PyModuleDef_HEAD_INIT, - "fcntl", - module_doc, - -1, - fcntl_methods, - NULL, - NULL, - NULL, - NULL + .m_name = "fcntl", + .m_doc = module_doc, + .m_size = 0, + .m_methods = fcntl_methods, + .m_slots = fcntl_slots, }; PyMODINIT_FUNC PyInit_fcntl(void) { - PyObject *m; - - /* Create the module and add the functions and documentation */ - m = PyModule_Create(&fcntlmodule); - if (m == NULL) - return NULL; - - /* Add some symbolic constants to the module */ - if (all_ins(m) < 0) { - Py_DECREF(m); - return NULL; - } - - return m; + return PyModuleDef_Init(&fcntlmodule); } From webhook-mailer at python.org Mon Jun 1 12:54:28 2020 From: webhook-mailer at python.org (Hai Shi) Date: Mon, 01 Jun 2020 16:54:28 -0000 Subject: [Python-checkins] bpo-39593: Add test on ctypes cfield.c s_set() (GH-18424) Message-ID: https://github.com/python/cpython/commit/a97011b9b8c8111f42e1e7594081956136d848da commit: a97011b9b8c8111f42e1e7594081956136d848da branch: master author: Hai Shi committer: GitHub date: 2020-06-01T18:54:18+02:00 summary: bpo-39593: Add test on ctypes cfield.c s_set() (GH-18424) files: M Lib/ctypes/test/test_struct_fields.py M Modules/_ctypes/cfield.c diff --git a/Lib/ctypes/test/test_struct_fields.py b/Lib/ctypes/test/test_struct_fields.py index 8045cc82679cc..ee8415f3e630c 100644 --- a/Lib/ctypes/test/test_struct_fields.py +++ b/Lib/ctypes/test/test_struct_fields.py @@ -46,6 +46,14 @@ class Y(X): Y._fields_ = [] self.assertRaises(AttributeError, setattr, X, "_fields_", []) + def test_5(self): + class X(Structure): + _fields_ = (("char", c_char * 5),) + + x = X(b'#' * 5) + x.char = b'a\0b\0' + self.assertEqual(bytes(x), b'a\x00###') + # __set__ and __get__ should raise a TypeError in case their self # argument is not a ctype instance. 
def test___set__(self): diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 7f853190a785e..32a2beeb744f7 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -1263,7 +1263,9 @@ s_set(void *ptr, PyObject *value, Py_ssize_t length) } data = PyBytes_AS_STRING(value); - size = strlen(data); /* XXX Why not Py_SIZE(value)? */ + // bpo-39593: Use strlen() to truncate the string at the first null character. + size = strlen(data); + if (size < length) { /* This will copy the terminating NUL character * if there is space for it. From webhook-mailer at python.org Mon Jun 1 13:21:49 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Mon, 01 Jun 2020 17:21:49 -0000 Subject: [Python-checkins] bpo-39943: Fix MSVC warnings in sre extension (GH-20508) Message-ID: https://github.com/python/cpython/commit/06e3a27a3c863495390a07c695171a8e62a6e0d2 commit: 06e3a27a3c863495390a07c695171a8e62a6e0d2 branch: master author: Ammar Askar committer: GitHub date: 2020-06-01T19:21:43+02:00 summary: bpo-39943: Fix MSVC warnings in sre extension (GH-20508) files: M Modules/_sre.c M Modules/sre_lib.h diff --git a/Modules/_sre.c b/Modules/_sre.c index 244e4f1f84dff..bdc427822d7e1 100644 --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -454,7 +454,10 @@ state_init(SRE_STATE* state, PatternObject* pattern, PyObject* string, return string; err: - PyMem_Del(state->mark); + /* We add an explicit cast here because MSVC has a bug when + compiling C code where it believes that `const void**` cannot be + safely casted to `void*`, see bpo-39943 for details. */ + PyMem_Del((void*) state->mark); state->mark = NULL; if (state->buffer.buf) PyBuffer_Release(&state->buffer); @@ -468,7 +471,8 @@ state_fini(SRE_STATE* state) PyBuffer_Release(&state->buffer); Py_XDECREF(state->string); data_stack_dealloc(state); - PyMem_Del(state->mark); + /* See above PyMem_Del for why we explicitly cast here. */ + PyMem_Del((void*) state->mark); state->mark = NULL; } diff --git a/Modules/sre_lib.h b/Modules/sre_lib.h index 9cc786321c560..2657d8d82c6f1 100644 --- a/Modules/sre_lib.h +++ b/Modules/sre_lib.h @@ -448,12 +448,15 @@ do { \ state->data_stack_base += size; \ } while (0) +/* We add an explicit cast to memcpy here because MSVC has a bug when + compiling C code where it believes that `const void**` cannot be + safely casted to `void*`, see bpo-39943 for details. 
*/ #define DATA_STACK_POP(state, data, size, discard) \ do { \ TRACE(("copy data to %p from %" PY_FORMAT_SIZE_T "d " \ "(%" PY_FORMAT_SIZE_T "d)\n", \ data, state->data_stack_base-size, size)); \ - memcpy(data, state->data_stack+state->data_stack_base-size, size); \ + memcpy((void*) data, state->data_stack+state->data_stack_base-size, size); \ if (discard) \ state->data_stack_base -= size; \ } while (0) From webhook-mailer at python.org Mon Jun 1 13:26:38 2020 From: webhook-mailer at python.org (Huon Wilson) Date: Mon, 01 Jun 2020 17:26:38 -0000 Subject: [Python-checkins] bpo-40630: adjust tracemalloc.reset_peak docs for backport to 3.9 (GH-20546) Message-ID: https://github.com/python/cpython/commit/39de8e4b6f139f8d8284732bd7bb6e5ccced29fa commit: 39de8e4b6f139f8d8284732bd7bb6e5ccced29fa branch: master author: Huon Wilson committer: GitHub date: 2020-06-01T19:26:33+02:00 summary: bpo-40630: adjust tracemalloc.reset_peak docs for backport to 3.9 (GH-20546) files: D Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst M Doc/library/tracemalloc.rst M Doc/whatsnew/3.10.rst M Doc/whatsnew/3.9.rst diff --git a/Doc/library/tracemalloc.rst b/Doc/library/tracemalloc.rst index fba1caab455d7..20f668c728202 100644 --- a/Doc/library/tracemalloc.rst +++ b/Doc/library/tracemalloc.rst @@ -345,7 +345,7 @@ Functions See also :func:`get_traced_memory`. - .. versionadded:: 3.10 + .. versionadded:: 3.9 .. function:: get_tracemalloc_memory() diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 8a6b02179db17..95c5aa7ec6e6b 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -95,13 +95,6 @@ New Modules Improved Modules ================ -tracemalloc ------------ - -Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory -blocks to the current size, to measure the peak of specific pieces of code. -(Contributed by Huon Wilson in :issue:`40630`.) - Optimizations ============= diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index a468130af1083..ccc84cced1090 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -562,6 +562,12 @@ Previously, :attr:`sys.stderr` was block-buffered when non-interactive. Now ``stderr`` defaults to always being line-buffered. (Contributed by Jendrik Seipp in :issue:`13601`.) +tracemalloc +----------- + +Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory +blocks to the current size, to measure the peak of specific pieces of code. +(Contributed by Huon Wilson in :issue:`40630`.) typing ------ diff --git a/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst b/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst deleted file mode 100644 index bb2e7452d3cfb..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-15-13-40-15.bpo-40630.YXEX_M.rst +++ /dev/null @@ -1,2 +0,0 @@ -Added :func:`tracemalloc.reset_peak` to set the peak size of traced memory -blocks to the current size, to measure the peak of specific pieces of code. 
From webhook-mailer at python.org Mon Jun 1 14:34:24 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 01 Jun 2020 18:34:24 -0000 Subject: [Python-checkins] bpo-40826: PyOS_InterruptOccurred() requires GIL (GH-20578) Message-ID: https://github.com/python/cpython/commit/cbe129692293251e7fbcea9ff0d822824d90c140 commit: cbe129692293251e7fbcea9ff0d822824d90c140 branch: master author: Victor Stinner committer: GitHub date: 2020-06-01T20:34:15+02:00 summary: bpo-40826: PyOS_InterruptOccurred() requires GIL (GH-20578) PyOS_InterruptOccurred() now fails with a fatal error if it is called with the GIL released. files: A Misc/NEWS.d/next/C API/2020-06-01-16-12-37.bpo-40826.zQzFoK.rst M Modules/signalmodule.c diff --git a/Misc/NEWS.d/next/C API/2020-06-01-16-12-37.bpo-40826.zQzFoK.rst b/Misc/NEWS.d/next/C API/2020-06-01-16-12-37.bpo-40826.zQzFoK.rst new file mode 100644 index 0000000000000..0d7a36c3eb401 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-01-16-12-37.bpo-40826.zQzFoK.rst @@ -0,0 +1,2 @@ +:c:func:`PyOS_InterruptOccurred` now fails with a fatal error if it is +called with the GIL released. diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 8348971c353ba..6d340a68634af 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1782,8 +1782,9 @@ PyOS_FiniInterrupts(void) int PyOS_InterruptOccurred(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - if (!_Py_ThreadCanHandleSignals(interp)) { + PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureTstateNotNULL(tstate); + if (!_Py_ThreadCanHandleSignals(tstate->interp)) { return 0; } From webhook-mailer at python.org Mon Jun 1 14:36:05 2020 From: webhook-mailer at python.org (Skip Montanaro) Date: Mon, 01 Jun 2020 18:36:05 -0000 Subject: [Python-checkins] bpo-39583: Remove superfluous "extern C" bits from Include/cpython/*.h (GH-18413) Message-ID: https://github.com/python/cpython/commit/b4d5a5cca29426a282e8f1e64b2271fdd1f0a23e commit: b4d5a5cca29426a282e8f1e64b2271fdd1f0a23e branch: master author: Skip Montanaro committer: GitHub date: 2020-06-01T20:35:56+02:00 summary: bpo-39583: Remove superfluous "extern C" bits from Include/cpython/*.h (GH-18413) files: A Misc/NEWS.d/next/C API/2020-02-08-08-01-35.bpo-39583.qURKSl.rst M Include/cpython/abstract.h M Include/cpython/ceval.h M Include/cpython/dictobject.h M Include/cpython/fileobject.h M Include/cpython/frameobject.h M Include/cpython/import.h M Include/cpython/initconfig.h M Include/cpython/interpreteridobject.h M Include/cpython/listobject.h M Include/cpython/object.h M Include/cpython/objimpl.h M Include/cpython/pyerrors.h M Include/cpython/pylifecycle.h M Include/cpython/pymem.h M Include/cpython/pystate.h M Include/cpython/sysmodule.h M Include/cpython/traceback.h M Include/cpython/tupleobject.h M Include/cpython/unicodeobject.h diff --git a/Include/cpython/abstract.h b/Include/cpython/abstract.h index aa72f998b701c..b5b6e4819788c 100644 --- a/Include/cpython/abstract.h +++ b/Include/cpython/abstract.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* === Object Protocol ================================================== */ #ifdef PY_SSIZE_T_CLEAN @@ -380,8 +376,4 @@ PyAPI_FUNC(void) _Py_add_one_to_index_C(int nd, Py_ssize_t *index, PyAPI_FUNC(int) _Py_convert_optional_to_ssize_t(PyObject *, void *); /* Same as PyNumber_Index but can return an instance of a subclass of int. 
*/ -PyAPI_FUNC(PyObject *) _PyNumber_Index(PyObject *o); - -#ifdef __cplusplus -} -#endif +PyAPI_FUNC(PyObject *) _PyNumber_Index(PyObject *o); \ No newline at end of file diff --git a/Include/cpython/ceval.h b/Include/cpython/ceval.h index e1922a677bd38..06338928f6738 100644 --- a/Include/cpython/ceval.h +++ b/Include/cpython/ceval.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyAPI_FUNC(void) PyEval_SetProfile(Py_tracefunc, PyObject *); PyAPI_DATA(int) _PyEval_SetProfile(PyThreadState *tstate, Py_tracefunc func, PyObject *arg); PyAPI_FUNC(void) PyEval_SetTrace(Py_tracefunc, PyObject *); @@ -32,7 +28,3 @@ PyAPI_FUNC(Py_ssize_t) _PyEval_RequestCodeExtraIndex(freefunc); PyAPI_FUNC(int) _PyEval_SliceIndex(PyObject *, Py_ssize_t *); PyAPI_FUNC(int) _PyEval_SliceIndexNotNone(PyObject *, Py_ssize_t *); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/dictobject.h b/Include/cpython/dictobject.h index e33a0d156fead..ffe0e97fb35f3 100644 --- a/Include/cpython/dictobject.h +++ b/Include/cpython/dictobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef struct _dictkeysobject PyDictKeysObject; /* The ma_values pointer is NULL for a combined table @@ -86,7 +82,3 @@ typedef struct { PyAPI_FUNC(PyObject *) _PyDictView_New(PyObject *, PyTypeObject *); PyAPI_FUNC(PyObject *) _PyDictView_Intersect(PyObject* self, PyObject *other); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/fileobject.h b/Include/cpython/fileobject.h index 57eac13c064c2..4f2408c7e8760 100644 --- a/Include/cpython/fileobject.h +++ b/Include/cpython/fileobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyAPI_FUNC(char *) Py_UniversalNewlineFgets(char *, int, FILE*, PyObject *); #if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03060000 @@ -26,7 +22,3 @@ typedef PyObject * (*Py_OpenCodeHookFunction)(PyObject *, void *); PyAPI_FUNC(PyObject *) PyFile_OpenCode(const char *utf8path); PyAPI_FUNC(PyObject *) PyFile_OpenCodeObject(PyObject *path); PyAPI_FUNC(int) PyFile_SetOpenCodeHook(Py_OpenCodeHookFunction hook, void *userData); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/frameobject.h b/Include/cpython/frameobject.h index 36a51baae8784..c76fbe0616cb2 100644 --- a/Include/cpython/frameobject.h +++ b/Include/cpython/frameobject.h @@ -4,10 +4,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef struct { int b_type; /* what kind of block this is */ int b_handler; /* where to jump to find handler */ @@ -78,7 +74,3 @@ PyAPI_FUNC(void) PyFrame_FastToLocals(PyFrameObject *); PyAPI_FUNC(void) _PyFrame_DebugMallocStats(FILE *out); PyAPI_FUNC(PyFrameObject *) PyFrame_GetBack(PyFrameObject *frame); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/import.h b/Include/cpython/import.h index c1b47121f1246..3b20a74c855db 100644 --- a/Include/cpython/import.h +++ b/Include/cpython/import.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyMODINIT_FUNC PyInit__imp(void); PyAPI_FUNC(int) _PyImport_IsInitialized(PyInterpreterState *); @@ -44,7 +40,3 @@ struct _frozen { collection of frozen modules: */ PyAPI_DATA(const struct _frozen *) PyImport_FrozenModules; - -#ifdef __cplusplus -} -#endif diff --git 
a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index df93a5539d48b..e9c2e6bec3861 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -1,9 +1,6 @@ #ifndef Py_PYCORECONFIG_H #define Py_PYCORECONFIG_H #ifndef Py_LIMITED_API -#ifdef __cplusplus -extern "C" { -#endif /* --- PyStatus ----------------------------------------------- */ @@ -438,8 +435,5 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items); -#ifdef __cplusplus -} -#endif #endif /* !Py_LIMITED_API */ #endif /* !Py_PYCORECONFIG_H */ diff --git a/Include/cpython/interpreteridobject.h b/Include/cpython/interpreteridobject.h index 67ec5873542d8..5076584209b90 100644 --- a/Include/cpython/interpreteridobject.h +++ b/Include/cpython/interpreteridobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* Interpreter ID Object */ PyAPI_DATA(PyTypeObject) _PyInterpreterID_Type; @@ -13,7 +9,3 @@ PyAPI_DATA(PyTypeObject) _PyInterpreterID_Type; PyAPI_FUNC(PyObject *) _PyInterpreterID_New(int64_t); PyAPI_FUNC(PyObject *) _PyInterpreterState_GetIDObject(PyInterpreterState *); PyAPI_FUNC(PyInterpreterState *) _PyInterpreterID_LookUp(PyObject *); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/listobject.h b/Include/cpython/listobject.h index 74fe3301a7ab7..b1af5f6764427 100644 --- a/Include/cpython/listobject.h +++ b/Include/cpython/listobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef struct { PyObject_VAR_HEAD /* Vector of pointers to list elements. list[0] is ob_item[0], etc. */ @@ -37,7 +33,3 @@ PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #define PyList_SET_ITEM(op, i, v) (_PyList_CAST(op)->ob_item[i] = (v)) #define PyList_GET_SIZE(op) Py_SIZE(_PyList_CAST(op)) #define _PyList_ITEMS(op) (_PyList_CAST(op)->ob_item) - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 444f832f5bd8d..304cfbfc37dff 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyAPI_FUNC(void) _Py_NewReference(PyObject *op); #ifdef Py_TRACE_REFS @@ -548,7 +544,3 @@ PyAPI_FUNC(void) _PyTrash_end(struct _ts *tstate); * unconditionally */ #define Py_TRASHCAN_SAFE_BEGIN(op) Py_TRASHCAN_BEGIN_CONDITION(op, 1) #define Py_TRASHCAN_SAFE_END(op) Py_TRASHCAN_END - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/objimpl.h b/Include/cpython/objimpl.h index b835936db7011..ca4009bcdb4c1 100644 --- a/Include/cpython/objimpl.h +++ b/Include/cpython/objimpl.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - #define _PyObject_SIZE(typeobj) ( (typeobj)->tp_basicsize ) /* _PyObject_VAR_SIZE returns the number of bytes (as size_t) allocated for a @@ -139,7 +135,3 @@ PyAPI_FUNC(PyObject *) _PyObject_GC_Calloc(size_t size); #define PyType_SUPPORTS_WEAKREFS(t) ((t)->tp_weaklistoffset > 0) PyAPI_FUNC(PyObject **) PyObject_GET_WEAKREFS_LISTPTR(PyObject *op); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/pyerrors.h b/Include/cpython/pyerrors.h index dd3c2caa0cc04..3f347dc2e2d62 100644 --- a/Include/cpython/pyerrors.h +++ b/Include/cpython/pyerrors.h @@ -2,10 +2,6 @@ # error "this header file must not be 
included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* Error objects */ /* PyException_HEAD defines the initial segment of every exception class. */ @@ -188,7 +184,3 @@ PyAPI_FUNC(void) _Py_NO_RETURN _Py_FatalErrorFormat( ...); #define Py_FatalError(message) _Py_FatalErrorFunc(__func__, message) - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/pylifecycle.h b/Include/cpython/pylifecycle.h index eb523b82e182d..f38ec5a4ae399 100644 --- a/Include/cpython/pylifecycle.h +++ b/Include/cpython/pylifecycle.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* Only used by applications that embed the interpreter and need to * override the standard encoding determination mechanism */ @@ -66,7 +62,3 @@ PyAPI_FUNC(int) _Py_LegacyLocaleDetected(int warn); PyAPI_FUNC(char *) _Py_SetLocaleFromEnv(int category); PyAPI_FUNC(PyThreadState *) _Py_NewInterpreter(int isolated_subinterpreter); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/pymem.h b/Include/cpython/pymem.h index 79f063b121753..61d719584584e 100644 --- a/Include/cpython/pymem.h +++ b/Include/cpython/pymem.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyAPI_FUNC(void *) PyMem_RawMalloc(size_t size); PyAPI_FUNC(void *) PyMem_RawCalloc(size_t nelem, size_t elsize); PyAPI_FUNC(void *) PyMem_RawRealloc(void *ptr, size_t new_size); @@ -102,7 +98,3 @@ PyAPI_FUNC(void) PyMem_SetAllocator(PyMemAllocatorDomain domain, The function does nothing if Python is not compiled is debug mode. */ PyAPI_FUNC(void) PyMem_SetupDebugHooks(void); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/pystate.h b/Include/cpython/pystate.h index f292da1d3c6c5..42a7fc163064d 100644 --- a/Include/cpython/pystate.h +++ b/Include/cpython/pystate.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - #include "cpython/initconfig.h" PyAPI_FUNC(int) _PyInterpreterState_RequiresIDRef(PyInterpreterState *); @@ -257,7 +253,3 @@ typedef int (*crossinterpdatafunc)(PyObject *, struct _xid *); PyAPI_FUNC(int) _PyCrossInterpreterData_RegisterClass(PyTypeObject *, crossinterpdatafunc); PyAPI_FUNC(crossinterpdatafunc) _PyCrossInterpreterData_Lookup(PyObject *); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/sysmodule.h b/Include/cpython/sysmodule.h index 1802b5b300018..fc4c899b3fe3d 100644 --- a/Include/cpython/sysmodule.h +++ b/Include/cpython/sysmodule.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - PyAPI_FUNC(PyObject *) _PySys_GetObjectId(_Py_Identifier *key); PyAPI_FUNC(int) _PySys_SetObjectId(_Py_Identifier *key, PyObject *); @@ -18,7 +14,3 @@ PyAPI_FUNC(int) PySys_Audit( const char *argFormat, ...); PyAPI_FUNC(int) PySys_AddAuditHook(Py_AuditHookFunction, void*); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/traceback.h b/Include/cpython/traceback.h index 837470c3ba2bc..aac5b42c344d3 100644 --- a/Include/cpython/traceback.h +++ b/Include/cpython/traceback.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef struct _traceback { PyObject_HEAD struct _traceback *tb_next; @@ -16,7 +12,3 @@ typedef struct _traceback { PyAPI_FUNC(int) _Py_DisplaySourceLine(PyObject *, PyObject *, int, int); PyAPI_FUNC(void) 
_PyTraceback_Add(const char *, const char *, int); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/tupleobject.h b/Include/cpython/tupleobject.h index 1565f2a5c3d98..51dcd4237be18 100644 --- a/Include/cpython/tupleobject.h +++ b/Include/cpython/tupleobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - typedef struct { PyObject_VAR_HEAD /* ob_item contains space for 'ob_size' elements. @@ -30,7 +26,3 @@ PyAPI_FUNC(void) _PyTuple_MaybeUntrack(PyObject *); #define PyTuple_SET_ITEM(op, i, v) (_PyTuple_CAST(op)->ob_item[i] = v) PyAPI_FUNC(void) _PyTuple_DebugMallocStats(FILE *out); - -#ifdef __cplusplus -} -#endif diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 4fd674ffea36e..3b49ce7759037 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -2,10 +2,6 @@ # error "this header file must not be included directly" #endif -#ifdef __cplusplus -extern "C" { -#endif - /* Py_UNICODE was the native Unicode storage format (code unit) used by Python and represents a single Unicode element in the Unicode type. With PEP 393, Py_UNICODE is deprecated and replaced with a @@ -1221,7 +1217,3 @@ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); - -#ifdef __cplusplus -} -#endif diff --git a/Misc/NEWS.d/next/C API/2020-02-08-08-01-35.bpo-39583.qURKSl.rst b/Misc/NEWS.d/next/C API/2020-02-08-08-01-35.bpo-39583.qURKSl.rst new file mode 100644 index 0000000000000..1c9f44f7443c1 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-02-08-08-01-35.bpo-39583.qURKSl.rst @@ -0,0 +1 @@ +Remove superfluous "extern C" declarations from ``Include/cpython/*.h``. From webhook-mailer at python.org Mon Jun 1 14:59:44 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 01 Jun 2020 18:59:44 -0000 Subject: [Python-checkins] bpo-40826: Fix GIL usage in PyOS_Readline() (GH-20579) Message-ID: https://github.com/python/cpython/commit/c353764fd564e401cf47a5d9efab18c72c60014e commit: c353764fd564e401cf47a5d9efab18c72c60014e branch: master author: Victor Stinner committer: GitHub date: 2020-06-01T20:59:35+02:00 summary: bpo-40826: Fix GIL usage in PyOS_Readline() (GH-20579) Fix GIL usage in PyOS_Readline(): lock the GIL to set an exception. Pass tstate to my_fgets() and _PyOS_WindowsConsoleReadline(). Cleanup these functions. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst M Parser/myreadline.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst new file mode 100644 index 0000000000000..f79f20d21d49c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst @@ -0,0 +1 @@ +Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception. diff --git a/Parser/myreadline.c b/Parser/myreadline.c index 04c2793225cb3..d2787f0d345cf 100644 --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -27,22 +27,24 @@ int (*PyOS_InputHook)(void) = NULL; except if PyOS_InterruptOccurred() returns true. 
*/ static int -my_fgets(char *buf, int len, FILE *fp) +my_fgets(PyThreadState* tstate, char *buf, int len, FILE *fp) { #ifdef MS_WINDOWS HANDLE hInterruptEvent; #endif - char *p; - int err; while (1) { - if (PyOS_InputHook != NULL) + if (PyOS_InputHook != NULL) { (void)(PyOS_InputHook)(); + } + errno = 0; clearerr(fp); - p = fgets(buf, len, fp); - if (p != NULL) + char *p = fgets(buf, len, fp); + if (p != NULL) { return 0; /* No error */ - err = errno; + } + int err = errno; + #ifdef MS_WINDOWS /* Ctrl-C anywhere on the line or Ctrl-Z if the only character on a line will set ERROR_OPERATION_ABORTED. Under normal @@ -68,22 +70,26 @@ my_fgets(char *buf, int len, FILE *fp) } } #endif /* MS_WINDOWS */ + if (feof(fp)) { clearerr(fp); return -1; /* EOF */ } + #ifdef EINTR if (err == EINTR) { - int s; - PyEval_RestoreThread(_PyOS_ReadlineTState); - s = PyErr_CheckSignals(); + PyEval_RestoreThread(tstate); + int s = PyErr_CheckSignals(); PyEval_SaveThread(); - if (s < 0) - return 1; - /* try again */ + + if (s < 0) { + return 1; + } + /* try again */ continue; } #endif + if (PyOS_InterruptOccurred()) { return 1; /* Interrupt */ } @@ -98,7 +104,7 @@ my_fgets(char *buf, int len, FILE *fp) extern char _get_console_type(HANDLE handle); char * -_PyOS_WindowsConsoleReadline(HANDLE hStdIn) +_PyOS_WindowsConsoleReadline(PyThreadState *tstate, HANDLE hStdIn) { static wchar_t wbuf_local[1024 * 16]; const DWORD chunk_size = 1024; @@ -133,11 +139,12 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (WaitForSingleObjectEx(hInterruptEvent, 100, FALSE) == WAIT_OBJECT_0) { ResetEvent(hInterruptEvent); - PyEval_RestoreThread(_PyOS_ReadlineTState); + PyEval_RestoreThread(tstate); s = PyErr_CheckSignals(); PyEval_SaveThread(); - if (s < 0) + if (s < 0) { goto exit; + } } break; } @@ -150,17 +157,22 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (wbuf == wbuf_local) { wbuf[total_read] = '\0'; wbuf = (wchar_t*)PyMem_RawMalloc(wbuflen * sizeof(wchar_t)); - if (wbuf) + if (wbuf) { wcscpy_s(wbuf, wbuflen, wbuf_local); + } else { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } } else { wchar_t *tmp = PyMem_RawRealloc(wbuf, wbuflen * sizeof(wchar_t)); if (tmp == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } wbuf = tmp; @@ -169,33 +181,45 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (wbuf[0] == '\x1a') { buf = PyMem_RawMalloc(1); - if (buf) + if (buf) { buf[0] = '\0'; + } else { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); } goto exit; } - u8len = WideCharToMultiByte(CP_UTF8, 0, wbuf, total_read, NULL, 0, NULL, NULL); + u8len = WideCharToMultiByte(CP_UTF8, 0, + wbuf, total_read, + NULL, 0, + NULL, NULL); buf = PyMem_RawMalloc(u8len + 1); if (buf == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } - u8len = WideCharToMultiByte(CP_UTF8, 0, wbuf, total_read, buf, u8len, NULL, NULL); + + u8len = WideCharToMultiByte(CP_UTF8, 0, + wbuf, total_read, + buf, u8len, + NULL, NULL); buf[u8len] = '\0'; exit: - if (wbuf != wbuf_local) + if (wbuf != wbuf_local) { PyMem_RawFree(wbuf); + } if (err) { - PyEval_RestoreThread(_PyOS_ReadlineTState); + PyEval_RestoreThread(tstate); PyErr_SetFromWindowsErr(err); PyEval_SaveThread(); } - return buf; } @@ -209,6 +233,8 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) { size_t n; char *p, *pr; + PyThreadState *tstate = _PyOS_ReadlineTState; + assert(tstate != NULL); #ifdef MS_WINDOWS if 
(!Py_LegacyWindowsStdioFlag && sys_stdin == stdin) { @@ -230,7 +256,9 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) if (wlen) { wbuf = PyMem_RawMalloc(wlen * sizeof(wchar_t)); if (wbuf == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } wlen = MultiByteToWideChar(CP_UTF8, 0, prompt, -1, @@ -249,7 +277,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) } } clearerr(sys_stdin); - return _PyOS_WindowsConsoleReadline(hStdIn); + return _PyOS_WindowsConsoleReadline(tstate, hStdIn); } } #endif @@ -257,16 +285,19 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) n = 100; p = (char *)PyMem_RawMalloc(n); if (p == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } fflush(sys_stdout); - if (prompt) + if (prompt) { fprintf(stderr, "%s", prompt); + } fflush(stderr); - switch (my_fgets(p, (int)n, sys_stdin)) { + switch (my_fgets(tstate, p, (int)n, sys_stdin)) { case 0: /* Normal case */ break; case 1: /* Interrupt */ @@ -278,29 +309,40 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) *p = '\0'; break; } + n = strlen(p); while (n > 0 && p[n-1] != '\n') { size_t incr = n+2; if (incr > INT_MAX) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_SetString(PyExc_OverflowError, "input line too long"); + PyEval_SaveThread(); return NULL; } + pr = (char *)PyMem_RawRealloc(p, n + incr); if (pr == NULL) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } p = pr; - if (my_fgets(p+n, (int)incr, sys_stdin) != 0) + + if (my_fgets(tstate, p+n, (int)incr, sys_stdin) != 0) { break; + } n += strlen(p+n); } + pr = (char *)PyMem_RawRealloc(p, n+1); if (pr == NULL) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } return pr; @@ -323,7 +365,8 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) char *rv, *res; size_t len; - if (_PyOS_ReadlineTState == _PyThreadState_GET()) { + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyOS_ReadlineTState == tstate) { PyErr_SetString(PyExc_RuntimeError, "can't re-enter readline"); return NULL; @@ -342,7 +385,7 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) } } - _PyOS_ReadlineTState = _PyThreadState_GET(); + _PyOS_ReadlineTState = tstate; Py_BEGIN_ALLOW_THREADS PyThread_acquire_lock(_PyOS_ReadlineLock, 1); From webhook-mailer at python.org Mon Jun 1 17:17:28 2020 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 01 Jun 2020 21:17:28 -0000 Subject: [Python-checkins] Ensure correct version of Sphinx is used for Windows builds (GH-20582) Message-ID: https://github.com/python/cpython/commit/fe5dd78182dbf4937bcc2b113ca7526bfad0192b commit: fe5dd78182dbf4937bcc2b113ca7526bfad0192b branch: master author: Steve Dower committer: GitHub date: 2020-06-01T22:17:23+01:00 summary: Ensure correct version of Sphinx is used for Windows builds (GH-20582) files: M Doc/make.bat diff --git a/Doc/make.bat b/Doc/make.bat index 6f8f172e95eb8..7fde063642771 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -13,7 +13,7 @@ if not defined SPHINXBUILD ( %PYTHON% -c "import sphinx" > nul 2> nul if errorlevel 1 ( echo Installing sphinx with %PYTHON% - %PYTHON% -m pip install sphinx + %PYTHON% -m pip install sphinx==2.2.0 if errorlevel 1 exit /B ) set SPHINXBUILD=%PYTHON% -c "import sphinx.cmd.build, sys; sys.exit(sphinx.cmd.build.main())" 
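For context on the Doc/make.bat change above: the build script only checks whether Sphinx is importable and, if it is not, installs the now-pinned release before building. The Python sketch below is purely illustrative and is not part of the commit; the sphinx==2.2.0 pin comes from the diff, while the ensure_sphinx() helper name and the import-based check are assumptions of this sketch.

# Illustrative sketch only -- not part of the commit above.
# Mirrors the Doc/make.bat logic: import sphinx if possible, otherwise
# install the pinned release with the selected interpreter.
import importlib
import subprocess
import sys

PINNED = "sphinx==2.2.0"   # version pinned by the make.bat change

def ensure_sphinx(python=sys.executable):
    try:
        importlib.import_module("sphinx")
    except ImportError:
        print(f"Installing sphinx with {python}")
        subprocess.check_call([python, "-m", "pip", "install", PINNED])

if __name__ == "__main__":
    ensure_sphinx()

Pinning an exact version keeps the Windows documentation builds reproducible even when a newer Sphinx release changes behaviour.
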
From webhook-mailer at python.org Mon Jun 1 17:23:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 21:23:28 -0000 Subject: [Python-checkins] Ensure correct version of Sphinx is used for Windows builds (GH-20582) Message-ID: https://github.com/python/cpython/commit/b640ca1f3e52771dd70a3442780c3eb8d902f3b3 commit: b640ca1f3e52771dd70a3442780c3eb8d902f3b3 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T14:23:18-07:00 summary: Ensure correct version of Sphinx is used for Windows builds (GH-20582) (cherry picked from commit fe5dd78182dbf4937bcc2b113ca7526bfad0192b) Co-authored-by: Steve Dower files: M Doc/make.bat diff --git a/Doc/make.bat b/Doc/make.bat index 2f21e6d52ef91..63e0bd7236475 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -13,7 +13,7 @@ if not defined SPHINXBUILD ( %PYTHON% -c "import sphinx" > nul 2> nul if errorlevel 1 ( echo Installing sphinx with %PYTHON% - %PYTHON% -m pip install sphinx + %PYTHON% -m pip install sphinx==2.2.0 if errorlevel 1 exit /B ) set SPHINXBUILD=%PYTHON% -c "import sphinx.cmd.build, sys; sys.exit(sphinx.cmd.build.main())" From webhook-mailer at python.org Mon Jun 1 17:25:37 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 01 Jun 2020 21:25:37 -0000 Subject: [Python-checkins] Ensure correct version of Sphinx is used for Windows builds (GH-20582) Message-ID: https://github.com/python/cpython/commit/139f1bafcf0bf48f8f55464523a4c5e50ddb50fd commit: 139f1bafcf0bf48f8f55464523a4c5e50ddb50fd branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-01T14:25:32-07:00 summary: Ensure correct version of Sphinx is used for Windows builds (GH-20582) (cherry picked from commit fe5dd78182dbf4937bcc2b113ca7526bfad0192b) Co-authored-by: Steve Dower files: M Doc/make.bat diff --git a/Doc/make.bat b/Doc/make.bat index 6f8f172e95eb8..7fde063642771 100644 --- a/Doc/make.bat +++ b/Doc/make.bat @@ -13,7 +13,7 @@ if not defined SPHINXBUILD ( %PYTHON% -c "import sphinx" > nul 2> nul if errorlevel 1 ( echo Installing sphinx with %PYTHON% - %PYTHON% -m pip install sphinx + %PYTHON% -m pip install sphinx==2.2.0 if errorlevel 1 exit /B ) set SPHINXBUILD=%PYTHON% -c "import sphinx.cmd.build, sys; sys.exit(sphinx.cmd.build.main())" From webhook-mailer at python.org Mon Jun 1 21:17:53 2020 From: webhook-mailer at python.org (Sanyam Khurana) Date: Tue, 02 Jun 2020 01:17:53 -0000 Subject: [Python-checkins] bpo-26543: Fix IMAP4.noop when debug mode is enabled (GH-15206) Message-ID: https://github.com/python/cpython/commit/8a3d2af997e3702eac4c5b012537be39ada36888 commit: 8a3d2af997e3702eac4c5b012537be39ada36888 branch: master author: Sanyam Khurana <8039608+CuriousLearner at users.noreply.github.com> committer: GitHub date: 2020-06-02T03:17:45+02:00 summary: bpo-26543: Fix IMAP4.noop when debug mode is enabled (GH-15206) files: A Misc/NEWS.d/next/Library/2019-08-11-16-28-03.bpo-26543.X-TJZO.rst M Lib/imaplib.py M Lib/test/test_imaplib.py diff --git a/Lib/imaplib.py b/Lib/imaplib.py index d9720f20c3902..73184396d894a 100644 --- a/Lib/imaplib.py +++ b/Lib/imaplib.py @@ -1251,13 +1251,12 @@ def _mesg(self, s, secs=None): sys.stderr.write(' %s.%02d %s\n' % (tm, (secs*100)%100, s)) sys.stderr.flush() - def _dump_ur(self, dict): - # Dump untagged responses (in `dict'). 
- l = dict.items() - if not l: return - t = '\n\t\t' - l = map(lambda x:'%s: "%s"' % (x[0], x[1][0] and '" "'.join(x[1]) or ''), l) - self._mesg('untagged responses dump:%s%s' % (t, t.join(l))) + def _dump_ur(self, untagged_resp_dict): + if not untagged_resp_dict: + return + items = (f'{key}: {value!r}' + for key, value in untagged_resp_dict.items()) + self._mesg('untagged responses dump:' + '\n\t\t'.join(items)) def _log(self, line): # Keep log of last `_cmd_log_len' interactions for debugging. diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index 0fcc1fb99a289..f93efba794952 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -933,6 +933,20 @@ def test_with_statement_logout(self): self.assertIsNone(server.logged) self.assertIsNone(server.logged) + @threading_helper.reap_threads + @cpython_only + def test_dump_ur(self): + # See: http://bugs.python.org/issue26543 + untagged_resp_dict = {'READ-WRITE': [b'']} + + with self.reaped_server(SimpleIMAPHandler) as server: + with self.imap_class(*server.server_address) as imap: + with mock.patch.object(imap, '_mesg') as mock_mesg: + imap._dump_ur(untagged_resp_dict) + mock_mesg.assert_called_with( + "untagged responses dump:READ-WRITE: [b'']" + ) + @unittest.skipUnless(ssl, "SSL not available") class ThreadedNetworkedTestsSSL(ThreadedNetworkedTests): diff --git a/Misc/NEWS.d/next/Library/2019-08-11-16-28-03.bpo-26543.X-TJZO.rst b/Misc/NEWS.d/next/Library/2019-08-11-16-28-03.bpo-26543.X-TJZO.rst new file mode 100644 index 0000000000000..8715b8d79cace --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-08-11-16-28-03.bpo-26543.X-TJZO.rst @@ -0,0 +1 @@ +Fix :meth:`IMAP4.noop()` when debug mode is enabled (ex: ``imaplib.Debug = 3``). From webhook-mailer at python.org Tue Jun 2 04:17:29 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Tue, 02 Jun 2020 08:17:29 -0000 Subject: [Python-checkins] Fix MSVC warnings in pythonrun.c (#GH-0587) Message-ID: https://github.com/python/cpython/commit/90d297012b3848454cbd00dde954e3ea1a09e86f commit: 90d297012b3848454cbd00dde954e3ea1a09e86f branch: master author: Ammar Askar committer: GitHub date: 2020-06-02T09:17:24+01:00 summary: Fix MSVC warnings in pythonrun.c (#GH-0587) files: M Python/pythonrun.c diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 160f44d38e2e1..cb0e3b02e163a 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -478,9 +478,9 @@ PyRun_SimpleStringFlags(const char *command, PyCompilerFlags *flags) static int parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, - int *lineno, int *offset, PyObject **text) + Py_ssize_t *lineno, Py_ssize_t *offset, PyObject **text) { - int hold; + Py_ssize_t hold; PyObject *v; _Py_IDENTIFIER(msg); _Py_IDENTIFIER(filename); @@ -513,7 +513,7 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, v = _PyObject_GetAttrId(err, &PyId_lineno); if (!v) goto finally; - hold = _PyLong_AsInt(v); + hold = PyLong_AsSsize_t(v); Py_DECREF(v); if (hold < 0 && PyErr_Occurred()) goto finally; @@ -526,7 +526,7 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, *offset = -1; Py_DECREF(v); } else { - hold = _PyLong_AsInt(v); + hold = PyLong_AsSsize_t(v); Py_DECREF(v); if (hold < 0 && PyErr_Occurred()) goto finally; @@ -552,7 +552,7 @@ parse_syntax_error(PyObject *err, PyObject **message, PyObject **filename, } static void -print_error_text(PyObject *f, int offset, PyObject *text_obj) +print_error_text(PyObject *f, Py_ssize_t offset, PyObject *text_obj) { /* 
Convert text to a char pointer; return if error */ const char *text = PyUnicode_AsUTF8(text_obj); @@ -586,7 +586,7 @@ print_error_text(PyObject *f, int offset, PyObject *text_obj) break; } Py_ssize_t inl = nl - text; - if (inl >= (Py_ssize_t)offset) { + if (inl >= offset) { break; } inl += 1; @@ -833,7 +833,7 @@ print_exception(PyObject *f, PyObject *value) _PyObject_HasAttrId(value, &PyId_print_file_and_line)) { PyObject *message, *filename, *text; - int lineno, offset; + Py_ssize_t lineno, offset; if (!parse_syntax_error(value, &message, &filename, &lineno, &offset, &text)) PyErr_Clear(); @@ -843,7 +843,7 @@ print_exception(PyObject *f, PyObject *value) Py_DECREF(value); value = message; - line = PyUnicode_FromFormat(" File \"%S\", line %d\n", + line = PyUnicode_FromFormat(" File \"%S\", line %zd\n", filename, lineno); Py_DECREF(filename); if (line != NULL) { From webhook-mailer at python.org Tue Jun 2 04:19:57 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Tue, 02 Jun 2020 08:19:57 -0000 Subject: [Python-checkins] bpo-40244: Remove XLC's support from the noreturn flag (GH-20588) Message-ID: https://github.com/python/cpython/commit/033d10bd21d962a59c6c4fc503092046baa451a1 commit: 033d10bd21d962a59c6c4fc503092046baa451a1 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-06-02T01:19:52-07:00 summary: bpo-40244: Remove XLC's support from the noreturn flag (GH-20588) Automerge-Triggered-By: @pablogsal files: M Include/pyport.h diff --git a/Include/pyport.h b/Include/pyport.h index 63d3b81de5d23..bdbd0c942f682 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -829,10 +829,10 @@ extern _invalid_parameter_handler _Py_silent_invalid_parameter_handler; #endif /* Mark a function which cannot return. Example: + PyAPI_FUNC(void) _Py_NO_RETURN PyThread_exit_thread(void); - PyAPI_FUNC(void) _Py_NO_RETURN PyThread_exit_thread(void); */ + XLC support is intentionally omitted due to bpo-40244 */ #if defined(__clang__) || \ - defined(__xlc__) || \ (defined(__GNUC__) && \ ((__GNUC__ >= 3) || \ (__GNUC__ == 2) && (__GNUC_MINOR__ >= 5))) From webhook-mailer at python.org Tue Jun 2 06:03:02 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 10:03:02 -0000 Subject: [Python-checkins] bpo-40241: What's New in Python 3.9: opaque PyGC_Head (GH-20586) Message-ID: https://github.com/python/cpython/commit/337d3103a2344e1fec75985e85fabcbdedac7d26 commit: 337d3103a2344e1fec75985e85fabcbdedac7d26 branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T12:02:58+02:00 summary: bpo-40241: What's New in Python 3.9: opaque PyGC_Head (GH-20586) files: M Doc/whatsnew/3.9.rst diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index ccc84cced1090..b20cd14565ae1 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -1098,6 +1098,10 @@ Porting to Python 3.9 and refers to a constant string. (Contributed by Serhiy Storchaka in :issue:`38650`.) +* The :c:type:`PyGC_Head` structure is now opaque. It is only defined in the + internal C API (``pycore_gc.h``). + (Contributed by Victor Stinner in :issue:`40241`.) 
+ Removed ------- From webhook-mailer at python.org Tue Jun 2 07:33:19 2020 From: webhook-mailer at python.org (Srinivas Reddy Thatiparthy =?utf-8?q??= =?utf-8?b?KOCwtuCxjeCwsOCxgOCwqOCwv+CwteCwvuCwuOCxjSAg4LCw4LGG4LCh?= =?utf-8?b?4LGN4LCh4LC/IOCwpOCwvuCwn+Cwv+CwquCwsOCxjeCwpOCwvyk=?=) Date: Tue, 02 Jun 2020 11:33:19 -0000 Subject: [Python-checkins] bpo-35078: Allow customization of CSS class name of a month in calendar module (gh-10137) Message-ID: https://github.com/python/cpython/commit/85339f5c220a5e79c47c3a33c93f1dca5c59c52e commit: 85339f5c220a5e79c47c3a33c93f1dca5c59c52e branch: master author: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) committer: GitHub date: 2020-06-02T13:33:09+02:00 summary: bpo-35078: Allow customization of CSS class name of a month in calendar module (gh-10137) Refactor formatweekday(), formatmonthname() methods in LocaleHTMLCalendar and LocaleTextCalendar classes in calendar module to call the base class methods. This enables customizable CSS classes for LocaleHTMLCalendar and LocaleTextCalendar. Patch by Srinivas Reddy Thatiparthy files: A Misc/NEWS.d/next/Library/2018-10-27-09-37-03.bpo-35078.kweA3R.rst M Lib/calendar.py M Lib/test/test_calendar.py diff --git a/Lib/calendar.py b/Lib/calendar.py index 7550d52c0a94a..7311a0173729e 100644 --- a/Lib/calendar.py +++ b/Lib/calendar.py @@ -571,19 +571,11 @@ def __init__(self, firstweekday=0, locale=None): def formatweekday(self, day, width): with different_locale(self.locale): - if width >= 9: - names = day_name - else: - names = day_abbr - name = names[day] - return name[:width].center(width) + return super().formatweekday(day, width) def formatmonthname(self, theyear, themonth, width, withyear=True): with different_locale(self.locale): - s = month_name[themonth] - if withyear: - s = "%s %r" % (s, theyear) - return s.center(width) + return super().formatmonthname(theyear, themonth, width, withyear) class LocaleHTMLCalendar(HTMLCalendar): @@ -601,16 +593,11 @@ def __init__(self, firstweekday=0, locale=None): def formatweekday(self, day): with different_locale(self.locale): - s = day_abbr[day] - return '%s' % (self.cssclasses[day], s) + return super().formatweekday(day) def formatmonthname(self, theyear, themonth, withyear=True): with different_locale(self.locale): - s = month_name[themonth] - if withyear: - s = '%s %s' % (s, theyear) - return '%s' % s - + return super().formatmonthname(theyear, themonth, withyear) # Support for old module level interface c = TextCalendar() diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py index 6241d114d3382..7c7ec1c931aa4 100644 --- a/Lib/test/test_calendar.py +++ b/Lib/test/test_calendar.py @@ -564,6 +564,30 @@ def test_locale_calendars(self): new_october = calendar.TextCalendar().formatmonthname(2010, 10, 10) self.assertEqual(old_october, new_october) + def test_locale_html_calendar_custom_css_class_month_name(self): + try: + cal = calendar.LocaleHTMLCalendar(locale='') + local_month = cal.formatmonthname(2010, 10, 10) + except locale.Error: + # cannot set the system default locale -- skip rest of test + raise unittest.SkipTest('cannot set the system default locale') + self.assertIn('class="month"', local_month) + cal.cssclass_month_head = "text-center month" + local_month = cal.formatmonthname(2010, 10, 10) + self.assertIn('class="text-center month"', local_month) + + def test_locale_html_calendar_custom_css_class_weekday(self): + try: + cal = calendar.LocaleHTMLCalendar(locale='') + local_weekday = cal.formatweekday(6) + except locale.Error: + # 
cannot set the system default locale -- skip rest of test + raise unittest.SkipTest('cannot set the system default locale') + self.assertIn('class="sun"', local_weekday) + cal.cssclasses_weekday_head = ["mon2", "tue2", "wed2", "thu2", "fri2", "sat2", "sun2"] + local_weekday = cal.formatweekday(6) + self.assertIn('class="sun2"', local_weekday) + def test_itermonthdays3(self): # ensure itermonthdays3 doesn't overflow after datetime.MAXYEAR list(calendar.Calendar().itermonthdays3(datetime.MAXYEAR, 12)) diff --git a/Misc/NEWS.d/next/Library/2018-10-27-09-37-03.bpo-35078.kweA3R.rst b/Misc/NEWS.d/next/Library/2018-10-27-09-37-03.bpo-35078.kweA3R.rst new file mode 100644 index 0000000000000..123f9dabde913 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-10-27-09-37-03.bpo-35078.kweA3R.rst @@ -0,0 +1,3 @@ +Refactor formatweekday, formatmonthname methods in LocaleHTMLCalendar and LocaleTextCalendar classes in calendar module to call the base class methods.This enables customizable CSS classes for LocaleHTMLCalendar. +Patch by Srinivas Reddy Thatiparthy + From webhook-mailer at python.org Tue Jun 2 08:03:34 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 12:03:34 -0000 Subject: [Python-checkins] bpo-40839: PyDict_GetItem() requires the GIL (GH-20580) Message-ID: https://github.com/python/cpython/commit/59d3dce69b0a4f6ee17578ae68037cc7ae90936f commit: 59d3dce69b0a4f6ee17578ae68037cc7ae90936f branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T14:03:25+02:00 summary: bpo-40839: PyDict_GetItem() requires the GIL (GH-20580) Calling PyDict_GetItem() without GIL held had been allowed for historical reason. It is no longer allowed. files: A Misc/NEWS.d/next/C API/2020-06-01-20-47-49.bpo-40839.bAi52Z.rst M Doc/c-api/dict.rst M Doc/whatsnew/3.10.rst M Objects/dictobject.c diff --git a/Doc/c-api/dict.rst b/Doc/c-api/dict.rst index 2fb29cdd61778..7493837ac622f 100644 --- a/Doc/c-api/dict.rst +++ b/Doc/c-api/dict.rst @@ -100,6 +100,10 @@ Dictionary Objects :meth:`__eq__` methods will get suppressed. To get error reporting use :c:func:`PyDict_GetItemWithError()` instead. + .. versionchanged:: 3.10 + Calling this API without :term:`GIL` held had been allowed for historical + reason. It is no longer allowed. + .. c:function:: PyObject* PyDict_GetItemWithError(PyObject *p, PyObject *key) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 95c5aa7ec6e6b..0b656475b7167 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -148,5 +148,9 @@ Porting to Python 3.10 see :c:func:`Py_SET_SIZE()` (available since Python 3.9). (Contributed by Victor Stinner in :issue:`39573`.) +* Calling :c:func:`PyDict_GetItem` without :term:`GIL` held had been allowed + for historical reason. It is no longer allowed. + (Contributed by Victor Stinner in :issue:`40839`.) + Removed ------- diff --git a/Misc/NEWS.d/next/C API/2020-06-01-20-47-49.bpo-40839.bAi52Z.rst b/Misc/NEWS.d/next/C API/2020-06-01-20-47-49.bpo-40839.bAi52Z.rst new file mode 100644 index 0000000000000..5de2f40c14eca --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-01-20-47-49.bpo-40839.bAi52Z.rst @@ -0,0 +1,2 @@ +Calling :c:func:`PyDict_GetItem` without :term:`GIL` held had been allowed for +historical reason. It is no longer allowed. diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 809a5ed778737..c4d5da51f3193 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -112,7 +112,8 @@ converting the dict to the combined table. 
#include "Python.h" #include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() -#include "pycore_object.h" +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_pyerrors.h" // _PyErr_Fetch() #include "pycore_pystate.h" // _PyThreadState_GET() #include "dict-common.h" #include "stringlib/eq.h" // unicode_eq() @@ -1387,14 +1388,12 @@ _PyDict_NewPresized(Py_ssize_t minused) PyObject * PyDict_GetItem(PyObject *op, PyObject *key) { - Py_hash_t hash; - Py_ssize_t ix; + if (!PyDict_Check(op)) { + return NULL; + } PyDictObject *mp = (PyDictObject *)op; - PyThreadState *tstate; - PyObject *value; - if (!PyDict_Check(op)) - return NULL; + Py_hash_t hash; if (!PyUnicode_CheckExact(key) || (hash = ((PyASCIIObject *) key)->hash) == -1) { @@ -1405,28 +1404,26 @@ PyDict_GetItem(PyObject *op, PyObject *key) } } - /* We can arrive here with a NULL tstate during initialization: try - running "python -Wi" for an example related to string interning. - Let's just hope that no exception occurs then... This must be - _PyThreadState_GET() and not PyThreadState_Get() because the latter - abort Python if tstate is NULL. */ - tstate = _PyThreadState_GET(); - if (tstate != NULL && tstate->curexc_type != NULL) { - /* preserve the existing exception */ - PyObject *err_type, *err_value, *err_tb; - PyErr_Fetch(&err_type, &err_value, &err_tb); - ix = (mp->ma_keys->dk_lookup)(mp, key, hash, &value); - /* ignore errors */ - PyErr_Restore(err_type, err_value, err_tb); - if (ix < 0) - return NULL; - } - else { - ix = (mp->ma_keys->dk_lookup)(mp, key, hash, &value); - if (ix < 0) { - PyErr_Clear(); - return NULL; - } + PyThreadState *tstate = _PyThreadState_GET(); +#ifdef Py_DEBUG + // bpo-40839: Before Python 3.10, it was possible to call PyDict_GetItem() + // with the GIL released. + _Py_EnsureTstateNotNULL(tstate); +#endif + + /* Preserve the existing exception */ + PyObject *exc_type, *exc_value, *exc_tb; + PyObject *value; + Py_ssize_t ix; + + _PyErr_Fetch(tstate, &exc_type, &exc_value, &exc_tb); + ix = (mp->ma_keys->dk_lookup)(mp, key, hash, &value); + + /* Ignore any exception raised by the lookup */ + _PyErr_Restore(tstate, exc_type, exc_value, exc_tb); + + if (ix < 0) { + return NULL; } return value; } From webhook-mailer at python.org Tue Jun 2 08:40:01 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 12:40:01 -0000 Subject: [Python-checkins] bpo-39465: Cleanup _PyUnicode_FromId() code (GH-20595) Message-ID: https://github.com/python/cpython/commit/297257f7bc198e2dc8e0866b539c73ff1a5cc588 commit: 297257f7bc198e2dc8e0866b539c73ff1a5cc588 branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T14:39:45+02:00 summary: bpo-39465: Cleanup _PyUnicode_FromId() code (GH-20595) Work on a local variable before filling _Py_Identifier members. 
files: M Objects/unicodeobject.c diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 511640438d015..e69bf01251ced 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2275,17 +2275,23 @@ PyUnicode_FromString(const char *u) PyObject * _PyUnicode_FromId(_Py_Identifier *id) { - if (!id->object) { - id->object = PyUnicode_DecodeUTF8Stateful(id->string, - strlen(id->string), - NULL, NULL); - if (!id->object) - return NULL; - PyUnicode_InternInPlace(&id->object); - assert(!id->next); - id->next = static_strings; - static_strings = id; + if (id->object) { + return id->object; + } + + PyObject *obj; + obj = PyUnicode_DecodeUTF8Stateful(id->string, + strlen(id->string), + NULL, NULL); + if (!obj) { + return NULL; } + PyUnicode_InternInPlace(&obj); + + assert(!id->next); + id->object = obj; + id->next = static_strings; + static_strings = id; return id->object; } From webhook-mailer at python.org Tue Jun 2 09:51:46 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 13:51:46 -0000 Subject: [Python-checkins] PyOS_AfterFork_Child() uses PyStatus (GH-20596) Message-ID: https://github.com/python/cpython/commit/26881c8fae3b67db3a01d335d3ae7356a29b433e commit: 26881c8fae3b67db3a01d335d3ae7356a29b433e branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T15:51:37+02:00 summary: PyOS_AfterFork_Child() uses PyStatus (GH-20596) PyOS_AfterFork_Child() helper functions now return a PyStatus: PyOS_AfterFork_Child() is now responsible to handle errors. * Move _PySignal_AfterFork() to the internal C API * Add #ifdef HAVE_FORK on _PyGILState_Reinit(), _PySignal_AfterFork() and _PyInterpreterState_DeleteExceptMain(). files: M Include/internal/pycore_ceval.h M Include/internal/pycore_import.h M Include/internal/pycore_pystate.h M Include/internal/pycore_runtime.h M Include/intrcheck.h M Modules/posixmodule.c M Modules/signalmodule.c M Python/ceval.c M Python/import.c M Python/pystate.c diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 368990099089f..2da0154525b1c 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -25,7 +25,7 @@ PyAPI_FUNC(int) _PyEval_AddPendingCall( void *arg); PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyThreadState *tstate); #ifdef HAVE_FORK -extern void _PyEval_ReInitThreads(struct pyruntimestate *runtime); +extern PyStatus _PyEval_ReInitThreads(struct pyruntimestate *runtime); #endif PyAPI_FUNC(void) _PyEval_SetCoroutineOriginTrackingDepth( PyThreadState *tstate, diff --git a/Include/internal/pycore_import.h b/Include/internal/pycore_import.h index b011ea4425112..35a67abebac6f 100644 --- a/Include/internal/pycore_import.h +++ b/Include/internal/pycore_import.h @@ -11,7 +11,7 @@ PyAPI_FUNC(PyObject *) _PyImport_FindBuiltin( ); #ifdef HAVE_FORK -extern void _PyImport_ReInitLock(void); +extern PyStatus _PyImport_ReInitLock(void); #endif extern void _PyImport_Cleanup(PyThreadState *tstate); diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 7ac4ad5869b4c..423c8113d7ac0 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -131,9 +131,12 @@ PyAPI_FUNC(PyThreadState *) _PyThreadState_Swap( PyThreadState *newts); PyAPI_FUNC(PyStatus) _PyInterpreterState_Enable(_PyRuntimeState *runtime); -PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime); -PyAPI_FUNC(void) _PyGILState_Reinit(_PyRuntimeState *runtime); +#ifdef HAVE_FORK +extern PyStatus 
_PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime); +extern PyStatus _PyGILState_Reinit(_PyRuntimeState *runtime); +extern void _PySignal_AfterFork(void); +#endif PyAPI_FUNC(int) _PyState_AddModule( diff --git a/Include/internal/pycore_runtime.h b/Include/internal/pycore_runtime.h index ebdc12b23a9ca..3a01d64e63d81 100644 --- a/Include/internal/pycore_runtime.h +++ b/Include/internal/pycore_runtime.h @@ -120,7 +120,7 @@ PyAPI_FUNC(PyStatus) _PyRuntimeState_Init(_PyRuntimeState *runtime); PyAPI_FUNC(void) _PyRuntimeState_Fini(_PyRuntimeState *runtime); #ifdef HAVE_FORK -PyAPI_FUNC(void) _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime); +extern PyStatus _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime); #endif /* Initialize _PyRuntimeState. diff --git a/Include/intrcheck.h b/Include/intrcheck.h index e5bf5a834e44c..88f2a7076ce37 100644 --- a/Include/intrcheck.h +++ b/Include/intrcheck.h @@ -1,4 +1,3 @@ - #ifndef Py_INTRCHECK_H #define Py_INTRCHECK_H #ifdef __cplusplus @@ -19,7 +18,6 @@ Py_DEPRECATED(3.7) PyAPI_FUNC(void) PyOS_AfterFork(void); #ifndef Py_LIMITED_API PyAPI_FUNC(int) _PyOS_IsMainThread(void); -PyAPI_FUNC(void) _PySignal_AfterFork(void); #ifdef MS_WINDOWS /* windows.h is not included by Python.h so use void* instead of HANDLE */ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 747184415e8bc..afb6d183077a1 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -34,6 +34,7 @@ #include "pycore_ceval.h" // _PyEval_ReInitThreads() #include "pycore_import.h" // _PyImport_ReInitLock() +#include "pycore_initconfig.h" // _PyStatus_EXCEPTION() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "structmember.h" // PyMemberDef #ifndef MS_WINDOWS @@ -461,15 +462,41 @@ PyOS_AfterFork_Parent(void) void PyOS_AfterFork_Child(void) { + PyStatus status; _PyRuntimeState *runtime = &_PyRuntime; - _PyGILState_Reinit(runtime); - _PyEval_ReInitThreads(runtime); - _PyImport_ReInitLock(); + + status = _PyGILState_Reinit(runtime); + if (_PyStatus_EXCEPTION(status)) { + goto fatal_error; + } + + status = _PyEval_ReInitThreads(runtime); + if (_PyStatus_EXCEPTION(status)) { + goto fatal_error; + } + + status = _PyImport_ReInitLock(); + if (_PyStatus_EXCEPTION(status)) { + goto fatal_error; + } + _PySignal_AfterFork(); - _PyRuntimeState_ReInitThreads(runtime); - _PyInterpreterState_DeleteExceptMain(runtime); + + status = _PyRuntimeState_ReInitThreads(runtime); + if (_PyStatus_EXCEPTION(status)) { + goto fatal_error; + } + + status = _PyInterpreterState_DeleteExceptMain(runtime); + if (_PyStatus_EXCEPTION(status)) { + goto fatal_error; + } run_at_forkers(_PyInterpreterState_GET()->after_forkers_child, 0); + return; + +fatal_error: + Py_ExitStatusException(status); } static int diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 6d340a68634af..24dbd4255a6e4 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1796,14 +1796,17 @@ PyOS_InterruptOccurred(void) return 1; } + +#ifdef HAVE_FORK static void _clear_pending_signals(void) { - int i; - if (!_Py_atomic_load(&is_tripped)) + if (!_Py_atomic_load(&is_tripped)) { return; + } + _Py_atomic_store(&is_tripped, 0); - for (i = 1; i < NSIG; ++i) { + for (int i = 1; i < NSIG; ++i) { _Py_atomic_store_relaxed(&Handlers[i].tripped, 0); } } @@ -1816,6 +1819,8 @@ _PySignal_AfterFork(void) * the interpreter had an opportunity to call the handlers. issue9535. 
*/ _clear_pending_signals(); } +#endif /* HAVE_FORK */ + int _PyOS_IsMainThread(void) diff --git a/Python/ceval.c b/Python/ceval.c index 01dd361e5035f..5edcfe354054a 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -433,11 +433,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) #ifdef HAVE_FORK /* This function is called from PyOS_AfterFork_Child to destroy all threads - * which are not running in the child process, and clear internal locks - * which might be held by those threads. - */ - -void + which are not running in the child process, and clear internal locks + which might be held by those threads. */ +PyStatus _PyEval_ReInitThreads(_PyRuntimeState *runtime) { PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); @@ -449,7 +447,7 @@ _PyEval_ReInitThreads(_PyRuntimeState *runtime) struct _gil_runtime_state *gil = &runtime->ceval.gil; #endif if (!gil_created(gil)) { - return; + return _PyStatus_OK(); } recreate_gil(gil); @@ -457,11 +455,12 @@ _PyEval_ReInitThreads(_PyRuntimeState *runtime) struct _pending_calls *pending = &tstate->interp->ceval.pending; if (_PyThread_at_fork_reinit(&pending->lock) < 0) { - Py_FatalError("Can't initialize threads for pending calls"); + return _PyStatus_ERR("Can't reinitialize pending calls lock"); } /* Destroy all threads except the current one */ _PyThreadState_DeleteExcept(runtime, tstate); + return _PyStatus_OK(); } #endif diff --git a/Python/import.c b/Python/import.c index 0e2e7c370868f..35724fef37a6b 100644 --- a/Python/import.c +++ b/Python/import.c @@ -148,7 +148,7 @@ _PyImportZip_Init(PyThreadState *tstate) in different threads to return with a partially loaded module. These calls are serialized by the global interpreter lock. */ -static PyThread_type_lock import_lock = 0; +static PyThread_type_lock import_lock = NULL; static unsigned long import_lock_thread = PYTHREAD_INVALID_THREAD_ID; static int import_lock_level = 0; @@ -171,7 +171,7 @@ _PyImport_AcquireLock(void) !PyThread_acquire_lock(import_lock, 0)) { PyThreadState *tstate = PyEval_SaveThread(); - PyThread_acquire_lock(import_lock, 1); + PyThread_acquire_lock(import_lock, WAIT_LOCK); PyEval_RestoreThread(tstate); } assert(import_lock_level == 0); @@ -197,19 +197,19 @@ _PyImport_ReleaseLock(void) } #ifdef HAVE_FORK -/* This function is called from PyOS_AfterFork_Child to ensure that newly +/* This function is called from PyOS_AfterFork_Child() to ensure that newly created child processes do not share locks with the parent. We now acquire the import lock around fork() calls but on some platforms (Solaris 9 and earlier? see isue7242) that still left us with problems. */ - -void +PyStatus _PyImport_ReInitLock(void) { if (import_lock != NULL) { if (_PyThread_at_fork_reinit(&import_lock) < 0) { - _Py_FatalErrorFunc(__func__, "failed to create a new lock"); + return _PyStatus_ERR("failed to create a new lock"); } } + if (import_lock_level > 1) { /* Forked as a side effect of import */ unsigned long me = PyThread_get_thread_ident(); @@ -224,6 +224,7 @@ _PyImport_ReInitLock(void) import_lock_thread = PYTHREAD_INVALID_THREAD_ID; import_lock_level = 0; } + return _PyStatus_OK(); } #endif diff --git a/Python/pystate.c b/Python/pystate.c index f92c55e747169..72d8b36342517 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -124,10 +124,8 @@ _PyRuntimeState_Fini(_PyRuntimeState *runtime) #ifdef HAVE_FORK /* This function is called from PyOS_AfterFork_Child to ensure that - * newly created child processes do not share locks with the parent. 
- */ - -void + newly created child processes do not share locks with the parent. */ +PyStatus _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) { // This was initially set in _PyRuntimeState_Init(). @@ -138,23 +136,20 @@ _PyRuntimeState_ReInitThreads(_PyRuntimeState *runtime) PyMemAllocatorEx old_alloc; _PyMem_SetDefaultAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - int interp_mutex = _PyThread_at_fork_reinit(&runtime->interpreters.mutex); - int main_interp_id_mutex = _PyThread_at_fork_reinit(&runtime->interpreters.main->id_mutex); - int xidregistry_mutex = _PyThread_at_fork_reinit(&runtime->xidregistry.mutex); + int reinit_interp = _PyThread_at_fork_reinit(&runtime->interpreters.mutex); + int reinit_main_id = _PyThread_at_fork_reinit(&runtime->interpreters.main->id_mutex); + int reinit_xidregistry = _PyThread_at_fork_reinit(&runtime->xidregistry.mutex); PyMem_SetAllocator(PYMEM_DOMAIN_RAW, &old_alloc); - if (interp_mutex < 0) { - Py_FatalError("Can't initialize lock for runtime interpreters"); - } - - if (main_interp_id_mutex < 0) { - Py_FatalError("Can't initialize ID lock for main interpreter"); - } + if (reinit_interp < 0 + || reinit_main_id < 0 + || reinit_xidregistry < 0) + { + return _PyStatus_ERR("Failed to reinitialize runtime locks"); - if (xidregistry_mutex < 0) { - Py_FatalError("Can't initialize lock for cross-interpreter data registry"); } + return _PyStatus_OK(); } #endif @@ -373,11 +368,12 @@ PyInterpreterState_Delete(PyInterpreterState *interp) } +#ifdef HAVE_FORK /* * Delete all interpreter states except the main interpreter. If there * is a current interpreter state, it *must* be the main interpreter. */ -void +PyStatus _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime) { struct _gilstate_runtime_state *gilstate = &runtime->gilstate; @@ -385,7 +381,7 @@ _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime) PyThreadState *tstate = _PyThreadState_Swap(gilstate, NULL); if (tstate != NULL && tstate->interp != interpreters->main) { - Py_FatalError("not main interpreter"); + return _PyStatus_ERR("not main interpreter"); } HEAD_LOCK(runtime); @@ -411,10 +407,12 @@ _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime) HEAD_UNLOCK(runtime); if (interpreters->head == NULL) { - Py_FatalError("missing main interpreter"); + return _PyStatus_ERR("missing main interpreter"); } _PyThreadState_Swap(gilstate, tstate); + return _PyStatus_OK(); } +#endif PyInterpreterState * @@ -1259,11 +1257,12 @@ _PyGILState_Fini(PyThreadState *tstate) gilstate->autoInterpreterState = NULL; } +#ifdef HAVE_FORK /* Reset the TSS key - called by PyOS_AfterFork_Child(). * This should not be necessary, but some - buggy - pthread implementations * don't reset TSS upon fork(), see issue #10517. 
*/ -void +PyStatus _PyGILState_Reinit(_PyRuntimeState *runtime) { struct _gilstate_runtime_state *gilstate = &runtime->gilstate; @@ -1271,7 +1270,7 @@ _PyGILState_Reinit(_PyRuntimeState *runtime) PyThread_tss_delete(&gilstate->autoTSSkey); if (PyThread_tss_create(&gilstate->autoTSSkey) != 0) { - Py_FatalError("Could not allocate TSS entry"); + return _PyStatus_NO_MEMORY(); } /* If the thread had an associated auto thread state, reassociate it with @@ -1279,9 +1278,11 @@ _PyGILState_Reinit(_PyRuntimeState *runtime) if (tstate && PyThread_tss_set(&gilstate->autoTSSkey, (void *)tstate) != 0) { - Py_FatalError("Couldn't create autoTSSkey mapping"); + return _PyStatus_ERR("failed to set autoTSSkey"); } + return _PyStatus_OK(); } +#endif /* When a thread state is created for a thread by some mechanism other than PyGILState_Ensure, it's important that the GILState machinery knows about From webhook-mailer at python.org Tue Jun 2 11:14:01 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 15:14:01 -0000 Subject: [Python-checkins] bpo-40232: _PyImport_ReInitLock() can now safely use its lock (GH-20597) Message-ID: https://github.com/python/cpython/commit/45b34a04a577aa49fa4825421758c3e8eaa1625d commit: 45b34a04a577aa49fa4825421758c3e8eaa1625d branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T17:13:49+02:00 summary: bpo-40232: _PyImport_ReInitLock() can now safely use its lock (GH-20597) Since _PyImport_ReInitLock() now calls _PyThread_at_fork_reinit() on the import lock, the lock is now in a known state: unlocked. It became safe to acquire it after fork. files: M Python/import.c diff --git a/Python/import.c b/Python/import.c index 35724fef37a6b..505688400ef3e 100644 --- a/Python/import.c +++ b/Python/import.c @@ -213,11 +213,7 @@ _PyImport_ReInitLock(void) if (import_lock_level > 1) { /* Forked as a side effect of import */ unsigned long me = PyThread_get_thread_ident(); - /* The following could fail if the lock is already held, but forking as - a side-effect of an import is a) rare, b) nuts, and c) difficult to - do thanks to the lock only being held when doing individual module - locks per import. 
*/ - PyThread_acquire_lock(import_lock, NOWAIT_LOCK); + PyThread_acquire_lock(import_lock, WAIT_LOCK); import_lock_thread = me; import_lock_level--; } else { From webhook-mailer at python.org Tue Jun 2 12:45:03 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 02 Jun 2020 16:45:03 -0000 Subject: [Python-checkins] PyOS_AfterFork_Child() pass tstate to _PyEval_ReInitThreads() (GH-20598) Message-ID: https://github.com/python/cpython/commit/317bab0bf61e4cbab37c81baf185d8b57ca62a6b commit: 317bab0bf61e4cbab37c81baf185d8b57ca62a6b branch: master author: Victor Stinner committer: GitHub date: 2020-06-02T18:44:54+02:00 summary: PyOS_AfterFork_Child() pass tstate to _PyEval_ReInitThreads() (GH-20598) files: M Include/internal/pycore_ceval.h M Modules/posixmodule.c M Python/ceval.c diff --git a/Include/internal/pycore_ceval.h b/Include/internal/pycore_ceval.h index 2da0154525b1c..aafb533b57d5f 100644 --- a/Include/internal/pycore_ceval.h +++ b/Include/internal/pycore_ceval.h @@ -25,7 +25,7 @@ PyAPI_FUNC(int) _PyEval_AddPendingCall( void *arg); PyAPI_FUNC(void) _PyEval_SignalAsyncExc(PyThreadState *tstate); #ifdef HAVE_FORK -extern PyStatus _PyEval_ReInitThreads(struct pyruntimestate *runtime); +extern PyStatus _PyEval_ReInitThreads(PyThreadState *tstate); #endif PyAPI_FUNC(void) _PyEval_SetCoroutineOriginTrackingDepth( PyThreadState *tstate, diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index afb6d183077a1..79779bfdeafd3 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -470,7 +470,10 @@ PyOS_AfterFork_Child(void) goto fatal_error; } - status = _PyEval_ReInitThreads(runtime); + PyThreadState *tstate = _PyThreadState_GET(); + _Py_EnsureTstateNotNULL(tstate); + + status = _PyEval_ReInitThreads(tstate); if (_PyStatus_EXCEPTION(status)) { goto fatal_error; } @@ -491,8 +494,9 @@ PyOS_AfterFork_Child(void) if (_PyStatus_EXCEPTION(status)) { goto fatal_error; } + assert(_PyThreadState_GET() == tstate); - run_at_forkers(_PyInterpreterState_GET()->after_forkers_child, 0); + run_at_forkers(tstate->interp->after_forkers_child, 0); return; fatal_error: diff --git a/Python/ceval.c b/Python/ceval.c index 5edcfe354054a..9ab8329d6d8e7 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -436,10 +436,9 @@ PyEval_ReleaseThread(PyThreadState *tstate) which are not running in the child process, and clear internal locks which might be held by those threads. */ PyStatus -_PyEval_ReInitThreads(_PyRuntimeState *runtime) +_PyEval_ReInitThreads(PyThreadState *tstate) { - PyThreadState *tstate = _PyRuntimeState_GetThreadState(runtime); - _Py_EnsureTstateNotNULL(tstate); + _PyRuntimeState *runtime = tstate->interp->runtime; #ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS struct _gil_runtime_state *gil = &tstate->interp->ceval.gil; From webhook-mailer at python.org Wed Jun 3 08:36:56 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 03 Jun 2020 12:36:56 -0000 Subject: [Python-checkins] bpo-32604: Fix reference leak in select module (GH-20600) Message-ID: https://github.com/python/cpython/commit/18a90248fdd92b27098cc4db773686a2d10a4d24 commit: 18a90248fdd92b27098cc4db773686a2d10a4d24 branch: master author: Victor Stinner committer: GitHub date: 2020-06-03T14:36:46+02:00 summary: bpo-32604: Fix reference leak in select module (GH-20600) Fix reference leak in PyInit_select() of the select module: remove Py_INCREF(poll_Type). 
files: A Misc/NEWS.d/next/Library/2020-06-02-23-49-07.bpo-32604.ZN4V4l.rst M Modules/selectmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-06-02-23-49-07.bpo-32604.ZN4V4l.rst b/Misc/NEWS.d/next/Library/2020-06-02-23-49-07.bpo-32604.ZN4V4l.rst new file mode 100644 index 0000000000000..6375276602e4a --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-02-23-49-07.bpo-32604.ZN4V4l.rst @@ -0,0 +1,2 @@ +Fix reference leak in the :mod:`select` module when the the module is +imported in a subinterpreter. diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 04e0067eec218..adf014fac43d4 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -2482,7 +2482,6 @@ PyInit_select(void) if (poll_Type == NULL) return NULL; get_select_state(m)->poll_Type = (PyTypeObject *)poll_Type; - Py_INCREF(poll_Type); PyModule_AddIntMacro(m, POLLIN); PyModule_AddIntMacro(m, POLLPRI); @@ -2518,7 +2517,6 @@ PyInit_select(void) if (devpoll_Type == NULL) return NULL; get_select_state(m)->devpoll_Type = (PyTypeObject *)devpoll_Type; - Py_INCREF(devpoll_Type); #endif #ifdef HAVE_EPOLL From webhook-mailer at python.org Wed Jun 3 08:40:04 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 03 Jun 2020 12:40:04 -0000 Subject: [Python-checkins] bpo-40826: Add _PyOS_InterruptOccurred(tstate) function (GH-20599) Message-ID: https://github.com/python/cpython/commit/fa7ab6aa0f9a4f695e5525db5a113cd21fa93787 commit: fa7ab6aa0f9a4f695e5525db5a113cd21fa93787 branch: master author: Victor Stinner committer: GitHub date: 2020-06-03T14:39:59+02:00 summary: bpo-40826: Add _PyOS_InterruptOccurred(tstate) function (GH-20599) my_fgets() now calls _PyOS_InterruptOccurred(tstate) to check for pending signals, rather calling PyOS_InterruptOccurred(). my_fgets() is called with the GIL released, whereas PyOS_InterruptOccurred() must be called with the GIL held. test_repl: use text=True and avoid SuppressCrashReport in test_multiline_string_parsing(). Fix my_fgets() on Windows: fgets(fp) does crash if fileno(fp) is closed. files: M Include/internal/pycore_pystate.h M Lib/test/test_repl.py M Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst M Modules/signalmodule.c M Parser/myreadline.c diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index 423c8113d7ac0..0cd5550cfda5c 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -144,6 +144,9 @@ PyAPI_FUNC(int) _PyState_AddModule( PyObject* module, struct PyModuleDef* def); + +PyAPI_FUNC(int) _PyOS_InterruptOccurred(PyThreadState *tstate); + #ifdef __cplusplus } #endif diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 71f192f90d9a1..563f188706b93 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -29,7 +29,9 @@ def spawn_repl(*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kw): # test.support.script_helper. 
env = kw.setdefault('env', dict(os.environ)) env['TERM'] = 'vt100' - return subprocess.Popen(cmd_line, executable=sys.executable, + return subprocess.Popen(cmd_line, + executable=sys.executable, + text=True, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr, **kw) @@ -49,12 +51,11 @@ def test_no_memory(self): sys.exit(0) """ user_input = dedent(user_input) - user_input = user_input.encode() p = spawn_repl() with SuppressCrashReport(): p.stdin.write(user_input) output = kill_python(p) - self.assertIn(b'After the exception.', output) + self.assertIn('After the exception.', output) # Exit code 120: Py_FinalizeEx() failed to flush stdout and stderr. self.assertIn(p.returncode, (1, 120)) @@ -86,13 +87,22 @@ def test_multiline_string_parsing(self): """ ''' user_input = dedent(user_input) - user_input = user_input.encode() p = spawn_repl() - with SuppressCrashReport(): - p.stdin.write(user_input) + p.stdin.write(user_input) output = kill_python(p) self.assertEqual(p.returncode, 0) + def test_close_stdin(self): + user_input = dedent(''' + import os + print("before close") + os.close(0) + ''') + process = spawn_repl() + output = process.communicate(user_input)[0] + self.assertEqual(process.returncode, 0) + self.assertIn('before close', output) + if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst index f79f20d21d49c..a03ed180eb952 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst @@ -1 +1,2 @@ -Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception. +Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception +and pass the Python thread state when checking if there is a pending signal. diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 24dbd4255a6e4..ef3536a210b04 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -1779,10 +1779,11 @@ PyOS_FiniInterrupts(void) finisignal(); } + +// The caller doesn't have to hold the GIL int -PyOS_InterruptOccurred(void) +_PyOS_InterruptOccurred(PyThreadState *tstate) { - PyThreadState *tstate = _PyThreadState_GET(); _Py_EnsureTstateNotNULL(tstate); if (!_Py_ThreadCanHandleSignals(tstate->interp)) { return 0; @@ -1797,6 +1798,15 @@ PyOS_InterruptOccurred(void) } +// The caller must to hold the GIL +int +PyOS_InterruptOccurred(void) +{ + PyThreadState *tstate = _PyThreadState_GET(); + return _PyOS_InterruptOccurred(tstate); +} + + #ifdef HAVE_FORK static void _clear_pending_signals(void) diff --git a/Parser/myreadline.c b/Parser/myreadline.c index d2787f0d345cf..2dd362321aaf3 100644 --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -24,14 +24,23 @@ static PyThread_type_lock _PyOS_ReadlineLock = NULL; int (*PyOS_InputHook)(void) = NULL; /* This function restarts a fgets() after an EINTR error occurred - except if PyOS_InterruptOccurred() returns true. */ + except if _PyOS_InterruptOccurred() returns true. 
*/ static int my_fgets(PyThreadState* tstate, char *buf, int len, FILE *fp) { #ifdef MS_WINDOWS - HANDLE hInterruptEvent; + HANDLE handle; + _Py_BEGIN_SUPPRESS_IPH + handle = (HANDLE)_get_osfhandle(fileno(fp)); + _Py_END_SUPPRESS_IPH + + /* bpo-40826: fgets(fp) does crash if fileno(fp) is closed */ + if (handle == INVALID_HANDLE_VALUE) { + return -1; /* EOF */ + } #endif + while (1) { if (PyOS_InputHook != NULL) { (void)(PyOS_InputHook)(); @@ -60,7 +69,7 @@ my_fgets(PyThreadState* tstate, char *buf, int len, FILE *fp) through to check for EOF. */ if (GetLastError()==ERROR_OPERATION_ABORTED) { - hInterruptEvent = _PyOS_SigintEvent(); + HANDLE hInterruptEvent = _PyOS_SigintEvent(); switch (WaitForSingleObjectEx(hInterruptEvent, 10, FALSE)) { case WAIT_OBJECT_0: ResetEvent(hInterruptEvent); @@ -90,7 +99,7 @@ my_fgets(PyThreadState* tstate, char *buf, int len, FILE *fp) } #endif - if (PyOS_InterruptOccurred()) { + if (_PyOS_InterruptOccurred(tstate)) { return 1; /* Interrupt */ } return -2; /* Error */ From webhook-mailer at python.org Wed Jun 3 08:42:38 2020 From: webhook-mailer at python.org (Jeremy Attali) Date: Wed, 03 Jun 2020 12:42:38 -0000 Subject: [Python-checkins] bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Message-ID: https://github.com/python/cpython/commit/c822efeda9a0afe87cf3429724732fc8e19a01fb commit: c822efeda9a0afe87cf3429724732fc8e19a01fb branch: master author: Jeremy Attali committer: GitHub date: 2020-06-03T05:42:33-07:00 summary: bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Would be nice to backport to python 3.7+. I don't think it's worth the hassle to backport this all the way down to 3.10. But I'll let the maintainers decide. This is hard to test because the test setup already includes this [environment variable](https://github.com/python/cpython/blob/master/Lib/test/pythoninfo.py#L292) Let me know if something doesn't match the PR guidelines. This is my first PR in the python source code. files: A Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 9c73bcfb44ae8..3dcf66b659825 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -545,7 +545,7 @@ def register_standard_browsers(): register(browser, None, BackgroundBrowser(browser)) else: # Prefer X browsers if present - if os.environ.get("DISPLAY"): + if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"): try: cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) diff --git a/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst new file mode 100644 index 0000000000000..4bebb311b4d54 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst @@ -0,0 +1,3 @@ +:mod:`webbrowser` now properly finds the default browser in pure Wayland +systems by checking the WAYLAND_DISPLAY environment variable. Patch +contributed by J?r?my Attali. 
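The webbrowser change above only touches the environment check, but its effect is easiest to see in isolation. The sketch below is illustrative rather than taken from the patch: have_graphical_session() and default_xdg_browser() are invented helper names, while the DISPLAY/WAYLAND_DISPLAY condition and the xdg-settings invocation mirror the Lib/webbrowser.py diff.

# Illustrative sketch only -- helper names are invented; the condition and
# the xdg-settings call mirror the Lib/webbrowser.py change above.
import os
import subprocess

def have_graphical_session(env=os.environ):
    # True on X11 (DISPLAY set) and now also on pure Wayland (WAYLAND_DISPLAY set).
    return bool(env.get("DISPLAY") or env.get("WAYLAND_DISPLAY"))

def default_xdg_browser():
    # Ask the desktop environment for its default browser, as webbrowser does.
    if not have_graphical_session():
        return None
    try:
        raw = subprocess.check_output(
            "xdg-settings get default-web-browser".split(),
            stderr=subprocess.DEVNULL)
    except (FileNotFoundError, subprocess.CalledProcessError):
        return None
    return raw.decode().strip()
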
From webhook-mailer at python.org Wed Jun 3 09:01:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 03 Jun 2020 13:01:28 -0000 Subject: [Python-checkins] bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Message-ID: https://github.com/python/cpython/commit/911c35d5d334b8c148202f2a7a32b511958032fc commit: 911c35d5d334b8c148202f2a7a32b511958032fc branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-03T06:01:23-07:00 summary: bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Would be nice to backport to python 3.7+. I don't think it's worth the hassle to backport this all the way down to 3.10. But I'll let the maintainers decide. This is hard to test because the test setup already includes this [environment variable](https://github.com/python/cpython/blob/master/Lib/test/pythoninfo.pyGH-L292) Let me know if something doesn't match the PR guidelines. This is my first PR in the python source code. (cherry picked from commit c822efeda9a0afe87cf3429724732fc8e19a01fb) Co-authored-by: Jeremy Attali files: A Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 34b86a505c246..b04ec7b65ae3b 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -540,7 +540,7 @@ def register_standard_browsers(): register(browser, None, BackgroundBrowser(browser)) else: # Prefer X browsers if present - if os.environ.get("DISPLAY"): + if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"): try: cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) diff --git a/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst new file mode 100644 index 0000000000000..4bebb311b4d54 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst @@ -0,0 +1,3 @@ +:mod:`webbrowser` now properly finds the default browser in pure Wayland +systems by checking the WAYLAND_DISPLAY environment variable. Patch +contributed by Jérémy Attali. From webhook-mailer at python.org Wed Jun 3 09:02:37 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 03 Jun 2020 13:02:37 -0000 Subject: [Python-checkins] bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Message-ID: https://github.com/python/cpython/commit/5b8787ef191864cd2313015959bcc3e10711aaff commit: 5b8787ef191864cd2313015959bcc3e10711aaff branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-03T06:02:33-07:00 summary: bpo-40767: Allow pure Wayland to get default XDG web browser (GH-20382) Would be nice to backport to python 3.7+. I don't think it's worth the hassle to backport this all the way down to 3.10. But I'll let the maintainers decide. This is hard to test because the test setup already includes this [environment variable](https://github.com/python/cpython/blob/master/Lib/test/pythoninfo.pyGH-L292) Let me know if something doesn't match the PR guidelines. This is my first PR in the python source code.
(cherry picked from commit c822efeda9a0afe87cf3429724732fc8e19a01fb) Co-authored-by: Jeremy Attali files: A Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 9c73bcfb44ae8..3dcf66b659825 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -545,7 +545,7 @@ def register_standard_browsers(): register(browser, None, BackgroundBrowser(browser)) else: # Prefer X browsers if present - if os.environ.get("DISPLAY"): + if os.environ.get("DISPLAY") or os.environ.get("WAYLAND_DISPLAY"): try: cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) diff --git a/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst new file mode 100644 index 0000000000000..4bebb311b4d54 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst @@ -0,0 +1,3 @@ +:mod:`webbrowser` now properly finds the default browser in pure Wayland +systems by checking the WAYLAND_DISPLAY environment variable. Patch +contributed by Jérémy Attali. From webhook-mailer at python.org Wed Jun 3 09:19:50 2020 From: webhook-mailer at python.org (Alex Povel) Date: Wed, 03 Jun 2020 13:19:50 -0000 Subject: [Python-checkins] bpo-40471: Fix grammar typo in 'issubclass' docstring (GH-19847) Message-ID: https://github.com/python/cpython/commit/df773f8c5454acebe08c31e7308597fa5a8bf5df commit: df773f8c5454acebe08c31e7308597fa5a8bf5df branch: master author: Alex Povel <48824213+alexpovel at users.noreply.github.com> committer: GitHub date: 2020-06-03T06:19:45-07:00 summary: bpo-40471: Fix grammar typo in 'issubclass' docstring (GH-19847) Just a brief grammar fix. See also <>. files: M Python/bltinmodule.c M Python/clinic/bltinmodule.c.h diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 199b09c4d8c41..65f9528084654 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2494,17 +2494,17 @@ issubclass as builtin_issubclass class_or_tuple: object / -Return whether 'cls' is a derived from another class or is the same class. +Return whether 'cls' is derived from another class or is the same class. A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to check against. This is equivalent to ``issubclass(x, A) or issubclass(x, B) -or ...`` etc. +or ...``. [clinic start generated code]*/ static PyObject * builtin_issubclass_impl(PyObject *module, PyObject *cls, PyObject *class_or_tuple) -/*[clinic end generated code: output=358412410cd7a250 input=af5f35e9ceaddaf6]*/ +/*[clinic end generated code: output=358412410cd7a250 input=a24b9f3d58c370d6]*/ { int retval; diff --git a/Python/clinic/bltinmodule.c.h b/Python/clinic/bltinmodule.c.h index 377afded9f8c5..bc3b518792811 100644 --- a/Python/clinic/bltinmodule.c.h +++ b/Python/clinic/bltinmodule.c.h @@ -800,11 +800,11 @@ PyDoc_STRVAR(builtin_issubclass__doc__, "issubclass($module, cls, class_or_tuple, /)\n" "--\n" "\n" -"Return whether \'cls\' is a derived from another class or is the same class.\n" +"Return whether \'cls\' is derived from another class or is the same class.\n" "\n" "A tuple, as in ``issubclass(x, (A, B, ...))``, may be given as the target to\n" "check against.
This is equivalent to ``issubclass(x, A) or issubclass(x, B)\n" -"or ...`` etc."); +"or ...``."); #define BUILTIN_ISSUBCLASS_METHODDEF \ {"issubclass", (PyCFunction)(void(*)(void))builtin_issubclass, METH_FASTCALL, builtin_issubclass__doc__}, @@ -830,4 +830,4 @@ builtin_issubclass(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=780fd9712ec6a6db input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e2fcf0201790367c input=a9049054013a1b77]*/ From webhook-mailer at python.org Wed Jun 3 10:18:23 2020 From: webhook-mailer at python.org (aboddie) Date: Wed, 03 Jun 2020 14:18:23 -0000 Subject: [Python-checkins] Update error message in _zoneinfo.py to use f-string (GH-20577) Message-ID: https://github.com/python/cpython/commit/5b9fbbabacca0378755fd9cadc4a7cc01a71eaef commit: 5b9fbbabacca0378755fd9cadc4a7cc01a71eaef branch: master author: aboddie <64019758+aboddie at users.noreply.github.com> committer: GitHub date: 2020-06-03T07:18:19-07:00 summary: Update error message in _zoneinfo.py to use f-string (GH-20577) Inline with the rest of the file, updated error message to use f-string. files: M Lib/zoneinfo/_zoneinfo.py diff --git a/Lib/zoneinfo/_zoneinfo.py b/Lib/zoneinfo/_zoneinfo.py index 7b1718a0676e1..9810637d3ef65 100644 --- a/Lib/zoneinfo/_zoneinfo.py +++ b/Lib/zoneinfo/_zoneinfo.py @@ -742,7 +742,7 @@ def _parse_tz_delta(tz_delta): if not -86400 < total < 86400: raise ValueError( - "Offset must be strictly between -24h and +24h:" + tz_delta + f"Offset must be strictly between -24h and +24h: {tz_delta}" ) # Yes, +5 maps to an offset of -5h From webhook-mailer at python.org Wed Jun 3 11:43:54 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Wed, 03 Jun 2020 15:43:54 -0000 Subject: [Python-checkins] Remove unused ReaderObject_Check macro (#20614) Message-ID: https://github.com/python/cpython/commit/586be6f3ff68ab4034e555f1434a4427e129ad0b commit: 586be6f3ff68ab4034e555f1434a4427e129ad0b branch: master author: Dong-hee Na committer: GitHub date: 2020-06-04T00:43:46+09:00 summary: Remove unused ReaderObject_Check macro (#20614) files: M Modules/_csv.c diff --git a/Modules/_csv.c b/Modules/_csv.c index 3a52632ccfd45..f33733aaf850d 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -112,8 +112,6 @@ typedef struct { static PyTypeObject Reader_Type; -#define ReaderObject_Check(v) Py_IS_TYPE(v, &Reader_Type) - typedef struct { PyObject_HEAD From webhook-mailer at python.org Wed Jun 3 12:28:27 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 03 Jun 2020 16:28:27 -0000 Subject: [Python-checkins] [3.9] bpo-40826: Fix GIL usage in PyOS_Readline() (GH-20613) (GH-20616) Message-ID: https://github.com/python/cpython/commit/6f7346bb3983cd7a6aa97eeeafffb3cecd5292b8 commit: 6f7346bb3983cd7a6aa97eeeafffb3cecd5292b8 branch: 3.8 author: Victor Stinner committer: GitHub date: 2020-06-03T18:28:18+02:00 summary: [3.9] bpo-40826: Fix GIL usage in PyOS_Readline() (GH-20613) (GH-20616) * bpo-40826: Fix GIL usage in PyOS_Readline() (GH-20579) Fix GIL usage in PyOS_Readline(): lock the GIL to set an exception. Pass tstate to my_fgets() and _PyOS_WindowsConsoleReadline(). Cleanup these functions. (cherry picked from commit c353764fd564e401cf47a5d9efab18c72c60014e) * bpo-40826: Add _PyOS_InterruptOccurred(tstate) function (GH-20599) my_fgets() now calls _PyOS_InterruptOccurred(tstate) to check for pending signals, rather calling PyOS_InterruptOccurred(). 
my_fgets() is called with the GIL released, whereas PyOS_InterruptOccurred() must be called with the GIL held. test_repl: use text=True and avoid SuppressCrashReport in test_multiline_string_parsing(). Fix my_fgets() on Windows: fgets(fp) does crash if fileno(fp) is closed. (cherry picked from commit fa7ab6aa0f9a4f695e5525db5a113cd21fa93787) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst M Include/internal/pycore_pystate.h M Lib/test/test_repl.py M Modules/signalmodule.c M Parser/myreadline.c diff --git a/Include/internal/pycore_pystate.h b/Include/internal/pycore_pystate.h index f90e7e1ab78e3..96d5e31d83a6e 100644 --- a/Include/internal/pycore_pystate.h +++ b/Include/internal/pycore_pystate.h @@ -317,6 +317,9 @@ PyAPI_FUNC(void) _PyInterpreterState_DeleteExceptMain(_PyRuntimeState *runtime); PyAPI_FUNC(void) _PyGILState_Reinit(_PyRuntimeState *runtime); + +PyAPI_FUNC(int) _PyOS_InterruptOccurred(PyThreadState *tstate); + #ifdef __cplusplus } #endif diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 71f192f90d9a1..563f188706b93 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -29,7 +29,9 @@ def spawn_repl(*args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, **kw): # test.support.script_helper. env = kw.setdefault('env', dict(os.environ)) env['TERM'] = 'vt100' - return subprocess.Popen(cmd_line, executable=sys.executable, + return subprocess.Popen(cmd_line, + executable=sys.executable, + text=True, stdin=subprocess.PIPE, stdout=stdout, stderr=stderr, **kw) @@ -49,12 +51,11 @@ def test_no_memory(self): sys.exit(0) """ user_input = dedent(user_input) - user_input = user_input.encode() p = spawn_repl() with SuppressCrashReport(): p.stdin.write(user_input) output = kill_python(p) - self.assertIn(b'After the exception.', output) + self.assertIn('After the exception.', output) # Exit code 120: Py_FinalizeEx() failed to flush stdout and stderr. self.assertIn(p.returncode, (1, 120)) @@ -86,13 +87,22 @@ def test_multiline_string_parsing(self): """ ''' user_input = dedent(user_input) - user_input = user_input.encode() p = spawn_repl() - with SuppressCrashReport(): - p.stdin.write(user_input) + p.stdin.write(user_input) output = kill_python(p) self.assertEqual(p.returncode, 0) + def test_close_stdin(self): + user_input = dedent(''' + import os + print("before close") + os.close(0) + ''') + process = spawn_repl() + output = process.communicate(user_input)[0] + self.assertEqual(process.returncode, 0) + self.assertIn('before close', output) + if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst new file mode 100644 index 0000000000000..a03ed180eb952 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst @@ -0,0 +1,2 @@ +Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception +and pass the Python thread state when checking if there is a pending signal. 
diff --git a/Modules/signalmodule.c b/Modules/signalmodule.c index 0c9a2671fe19b..119fc355ff1fd 100644 --- a/Modules/signalmodule.c +++ b/Modules/signalmodule.c @@ -187,14 +187,20 @@ itimer_retval(struct itimerval *iv) #endif static int -is_main(_PyRuntimeState *runtime) +is_main_interp(_PyRuntimeState *runtime, PyInterpreterState *interp) { unsigned long thread = PyThread_get_thread_ident(); - PyInterpreterState *interp = _PyRuntimeState_GetThreadState(runtime)->interp; return (thread == runtime->main_thread && interp == runtime->interpreters.main); } +static int +is_main(_PyRuntimeState *runtime) +{ + PyInterpreterState *interp = _PyRuntimeState_GetThreadState(runtime)->interp; + return is_main_interp(runtime, interp); +} + static PyObject * signal_default_int_handler(PyObject *self, PyObject *args) { @@ -1726,12 +1732,14 @@ PyOS_FiniInterrupts(void) finisignal(); } + +// The caller doesn't have to hold the GIL int -PyOS_InterruptOccurred(void) +_PyOS_InterruptOccurred(PyThreadState *tstate) { if (_Py_atomic_load_relaxed(&Handlers[SIGINT].tripped)) { _PyRuntimeState *runtime = &_PyRuntime; - if (!is_main(runtime)) { + if (!is_main_interp(runtime, tstate->interp)) { return 0; } _Py_atomic_store_relaxed(&Handlers[SIGINT].tripped, 0); @@ -1740,6 +1748,16 @@ PyOS_InterruptOccurred(void) return 0; } + +// The caller must to hold the GIL +int +PyOS_InterruptOccurred(void) +{ + PyThreadState *tstate = _PyThreadState_GET(); + return _PyOS_InterruptOccurred(tstate); +} + + static void _clear_pending_signals(void) { diff --git a/Parser/myreadline.c b/Parser/myreadline.c index 43e5583b8bcc4..d7ed357faa383 100644 --- a/Parser/myreadline.c +++ b/Parser/myreadline.c @@ -25,25 +25,36 @@ static PyThread_type_lock _PyOS_ReadlineLock = NULL; int (*PyOS_InputHook)(void) = NULL; /* This function restarts a fgets() after an EINTR error occurred - except if PyOS_InterruptOccurred() returns true. */ + except if _PyOS_InterruptOccurred() returns true. */ static int -my_fgets(char *buf, int len, FILE *fp) +my_fgets(PyThreadState* tstate, char *buf, int len, FILE *fp) { #ifdef MS_WINDOWS - HANDLE hInterruptEvent; + HANDLE handle; + _Py_BEGIN_SUPPRESS_IPH + handle = (HANDLE)_get_osfhandle(fileno(fp)); + _Py_END_SUPPRESS_IPH + + /* bpo-40826: fgets(fp) does crash if fileno(fp) is closed */ + if (handle == INVALID_HANDLE_VALUE) { + return -1; /* EOF */ + } #endif - char *p; - int err; + while (1) { - if (PyOS_InputHook != NULL) + if (PyOS_InputHook != NULL) { (void)(PyOS_InputHook)(); + } + errno = 0; clearerr(fp); - p = fgets(buf, len, fp); - if (p != NULL) + char *p = fgets(buf, len, fp); + if (p != NULL) { return 0; /* No error */ - err = errno; + } + int err = errno; + #ifdef MS_WINDOWS /* Ctrl-C anywhere on the line or Ctrl-Z if the only character on a line will set ERROR_OPERATION_ABORTED. Under normal @@ -59,7 +70,7 @@ my_fgets(char *buf, int len, FILE *fp) through to check for EOF. 
*/ if (GetLastError()==ERROR_OPERATION_ABORTED) { - hInterruptEvent = _PyOS_SigintEvent(); + HANDLE hInterruptEvent = _PyOS_SigintEvent(); switch (WaitForSingleObjectEx(hInterruptEvent, 10, FALSE)) { case WAIT_OBJECT_0: ResetEvent(hInterruptEvent); @@ -69,23 +80,27 @@ my_fgets(char *buf, int len, FILE *fp) } } #endif /* MS_WINDOWS */ + if (feof(fp)) { clearerr(fp); return -1; /* EOF */ } + #ifdef EINTR if (err == EINTR) { - int s; - PyEval_RestoreThread(_PyOS_ReadlineTState); - s = PyErr_CheckSignals(); + PyEval_RestoreThread(tstate); + int s = PyErr_CheckSignals(); PyEval_SaveThread(); - if (s < 0) - return 1; - /* try again */ + + if (s < 0) { + return 1; + } + /* try again */ continue; } #endif - if (PyOS_InterruptOccurred()) { + + if (_PyOS_InterruptOccurred(tstate)) { return 1; /* Interrupt */ } return -2; /* Error */ @@ -99,7 +114,7 @@ my_fgets(char *buf, int len, FILE *fp) extern char _get_console_type(HANDLE handle); char * -_PyOS_WindowsConsoleReadline(HANDLE hStdIn) +_PyOS_WindowsConsoleReadline(PyThreadState *tstate, HANDLE hStdIn) { static wchar_t wbuf_local[1024 * 16]; const DWORD chunk_size = 1024; @@ -134,11 +149,12 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (WaitForSingleObjectEx(hInterruptEvent, 100, FALSE) == WAIT_OBJECT_0) { ResetEvent(hInterruptEvent); - PyEval_RestoreThread(_PyOS_ReadlineTState); + PyEval_RestoreThread(tstate); s = PyErr_CheckSignals(); PyEval_SaveThread(); - if (s < 0) + if (s < 0) { goto exit; + } } break; } @@ -151,17 +167,22 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (wbuf == wbuf_local) { wbuf[total_read] = '\0'; wbuf = (wchar_t*)PyMem_RawMalloc(wbuflen * sizeof(wchar_t)); - if (wbuf) + if (wbuf) { wcscpy_s(wbuf, wbuflen, wbuf_local); + } else { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } } else { wchar_t *tmp = PyMem_RawRealloc(wbuf, wbuflen * sizeof(wchar_t)); if (tmp == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } wbuf = tmp; @@ -170,33 +191,45 @@ _PyOS_WindowsConsoleReadline(HANDLE hStdIn) if (wbuf[0] == '\x1a') { buf = PyMem_RawMalloc(1); - if (buf) + if (buf) { buf[0] = '\0'; + } else { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); } goto exit; } - u8len = WideCharToMultiByte(CP_UTF8, 0, wbuf, total_read, NULL, 0, NULL, NULL); + u8len = WideCharToMultiByte(CP_UTF8, 0, + wbuf, total_read, + NULL, 0, + NULL, NULL); buf = PyMem_RawMalloc(u8len + 1); if (buf == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); goto exit; } - u8len = WideCharToMultiByte(CP_UTF8, 0, wbuf, total_read, buf, u8len, NULL, NULL); + + u8len = WideCharToMultiByte(CP_UTF8, 0, + wbuf, total_read, + buf, u8len, + NULL, NULL); buf[u8len] = '\0'; exit: - if (wbuf != wbuf_local) + if (wbuf != wbuf_local) { PyMem_RawFree(wbuf); + } if (err) { - PyEval_RestoreThread(_PyOS_ReadlineTState); + PyEval_RestoreThread(tstate); PyErr_SetFromWindowsErr(err); PyEval_SaveThread(); } - return buf; } @@ -210,6 +243,8 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) { size_t n; char *p, *pr; + PyThreadState *tstate = _PyOS_ReadlineTState; + assert(tstate != NULL); #ifdef MS_WINDOWS if (!Py_LegacyWindowsStdioFlag && sys_stdin == stdin) { @@ -231,7 +266,9 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) if (wlen) { wbuf = PyMem_RawMalloc(wlen * sizeof(wchar_t)); if (wbuf == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } wlen = 
MultiByteToWideChar(CP_UTF8, 0, prompt, -1, @@ -250,7 +287,7 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) } } clearerr(sys_stdin); - return _PyOS_WindowsConsoleReadline(hStdIn); + return _PyOS_WindowsConsoleReadline(tstate, hStdIn); } } #endif @@ -258,16 +295,19 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) n = 100; p = (char *)PyMem_RawMalloc(n); if (p == NULL) { + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } fflush(sys_stdout); - if (prompt) + if (prompt) { fprintf(stderr, "%s", prompt); + } fflush(stderr); - switch (my_fgets(p, (int)n, sys_stdin)) { + switch (my_fgets(tstate, p, (int)n, sys_stdin)) { case 0: /* Normal case */ break; case 1: /* Interrupt */ @@ -279,29 +319,40 @@ PyOS_StdioReadline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) *p = '\0'; break; } + n = strlen(p); while (n > 0 && p[n-1] != '\n') { size_t incr = n+2; if (incr > INT_MAX) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_SetString(PyExc_OverflowError, "input line too long"); + PyEval_SaveThread(); return NULL; } + pr = (char *)PyMem_RawRealloc(p, n + incr); if (pr == NULL) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } p = pr; - if (my_fgets(p+n, (int)incr, sys_stdin) != 0) + + if (my_fgets(tstate, p+n, (int)incr, sys_stdin) != 0) { break; + } n += strlen(p+n); } + pr = (char *)PyMem_RawRealloc(p, n+1); if (pr == NULL) { PyMem_RawFree(p); + PyEval_RestoreThread(tstate); PyErr_NoMemory(); + PyEval_SaveThread(); return NULL; } return pr; @@ -324,7 +375,8 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) char *rv, *res; size_t len; - if (_PyOS_ReadlineTState == _PyThreadState_GET()) { + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyOS_ReadlineTState == tstate) { PyErr_SetString(PyExc_RuntimeError, "can't re-enter readline"); return NULL; @@ -343,7 +395,7 @@ PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, const char *prompt) } } - _PyOS_ReadlineTState = _PyThreadState_GET(); + _PyOS_ReadlineTState = tstate; Py_BEGIN_ALLOW_THREADS PyThread_acquire_lock(_PyOS_ReadlineLock, 1); From webhook-mailer at python.org Thu Jun 4 01:19:40 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Thu, 04 Jun 2020 05:19:40 -0000 Subject: [Python-checkins] Fix MSVC warning in frameobject.c (GH-20590) Message-ID: https://github.com/python/cpython/commit/6e23a9c82b7fd2366003b9191cd93a9683b9d80c commit: 6e23a9c82b7fd2366003b9191cd93a9683b9d80c branch: master author: Ammar Askar committer: GitHub date: 2020-06-04T06:19:23+01:00 summary: Fix MSVC warning in frameobject.c (GH-20590) files: M Objects/frameobject.c diff --git a/Objects/frameobject.c b/Objects/frameobject.c index af32276c98b24..b6d073bd456d0 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -397,7 +397,9 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore return -1; } - int len = PyBytes_GET_SIZE(f->f_code->co_code)/sizeof(_Py_CODEUNIT); + int len = Py_SAFE_DOWNCAST( + PyBytes_GET_SIZE(f->f_code->co_code)/sizeof(_Py_CODEUNIT), + Py_ssize_t, int); int *lines = marklines(f->f_code, len); if (lines == NULL) { return -1; From webhook-mailer at python.org Thu Jun 4 08:23:52 2020 From: webhook-mailer at python.org (Mark Shannon) Date: Thu, 04 Jun 2020 12:23:52 -0000 Subject: [Python-checkins] Don't raise an exception on normal return from generator. 
(GH-19473) Message-ID: https://github.com/python/cpython/commit/50a48dad5579d67d7cae350f6ad5ae5c33f56abb commit: 50a48dad5579d67d7cae350f6ad5ae5c33f56abb branch: master author: Mark Shannon committer: GitHub date: 2020-06-04T13:23:35+01:00 summary: Don't raise an exception on normal return from generator. (GH-19473) files: A Misc/NEWS.d/next/Core and Builtins/2020-04-11-13-07-49.bpo-4022.Ctpn_F.rst M Objects/genobject.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-11-13-07-49.bpo-4022.Ctpn_F.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-11-13-07-49.bpo-4022.Ctpn_F.rst new file mode 100644 index 0000000000000..a13a8e8822683 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-04-11-13-07-49.bpo-4022.Ctpn_F.rst @@ -0,0 +1 @@ +Improve performance of generators by not raising internal StopIteration. diff --git a/Objects/genobject.c b/Objects/genobject.c index 09efbab69a7d3..1393f42533a59 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -231,7 +231,8 @@ gen_send_ex(PyGenObject *gen, PyObject *arg, int exc, int closing) if (PyAsyncGen_CheckExact(gen)) { PyErr_SetNone(PyExc_StopAsyncIteration); } - else { + else if (arg) { + /* Set exception if not called by gen_iternext() */ PyErr_SetNone(PyExc_StopIteration); } } From webhook-mailer at python.org Thu Jun 4 08:48:25 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Thu, 04 Jun 2020 12:48:25 -0000 Subject: [Python-checkins] bpo-17258: Add requires_hashdigest to multiprocessing tests (GH-20412) Message-ID: https://github.com/python/cpython/commit/b022e5cffbd3ff51ae361cf80f2a3b660be8b1ee commit: b022e5cffbd3ff51ae361cf80f2a3b660be8b1ee branch: master author: Christian Heimes committer: GitHub date: 2020-06-04T05:48:17-07:00 summary: bpo-17258: Add requires_hashdigest to multiprocessing tests (GH-20412) Skip some :mod:`multiprocessing` tests when MD5 hash digest is blocked. Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Tests/2020-05-26-07-53-31.bpo-17258.X_IKTQ.rst M Lib/test/_test_multiprocessing.py M Lib/test/test_concurrent_futures.py diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index bbba2b45e5f03..d01a6680e409c 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -26,6 +26,7 @@ import test.support import test.support.script_helper from test import support +from test.support import hashlib_helper from test.support import socket_helper from test.support import threading_helper @@ -2954,6 +2955,8 @@ def test_remote(self): # Make queue finalizer run before the server is stopped del queue + + at hashlib_helper.requires_hashdigest('md5') class _TestManagerRestart(BaseTestCase): @classmethod @@ -3438,6 +3441,7 @@ def test_dont_merge(self): # @unittest.skipUnless(HAS_REDUCTION, "test needs multiprocessing.reduction") + at hashlib_helper.requires_hashdigest('md5') class _TestPicklingConnections(BaseTestCase): ALLOWED_TYPES = ('processes',) @@ -3740,6 +3744,7 @@ def test_copy(self): @unittest.skipUnless(HAS_SHMEM, "requires multiprocessing.shared_memory") + at hashlib_helper.requires_hashdigest('md5') class _TestSharedMemory(BaseTestCase): ALLOWED_TYPES = ('processes',) @@ -4415,6 +4420,7 @@ def test_invalid_handles(self): + at hashlib_helper.requires_hashdigest('md5') class OtherTest(unittest.TestCase): # TODO: add more tests for deliver/answer challenge. 
def test_deliver_challenge_auth_failure(self): @@ -4451,6 +4457,7 @@ def send_bytes(self, data): def initializer(ns): ns.test += 1 + at hashlib_helper.requires_hashdigest('md5') class TestInitializers(unittest.TestCase): def setUp(self): self.mgr = multiprocessing.Manager() @@ -5305,6 +5312,7 @@ def is_alive(self): any(process.is_alive() for process in forked_processes)) + at hashlib_helper.requires_hashdigest('md5') class TestSyncManagerTypes(unittest.TestCase): """Test all the types which can be shared between a parent and a child process by using a manager which acts as an intermediary @@ -5699,6 +5707,8 @@ def install_tests_in_module_dict(remote_globs, start_method): Mixin = local_globs[type_.capitalize() + 'Mixin'] class Temp(base, Mixin, unittest.TestCase): pass + if type_ == 'manager': + Temp = hashlib_helper.requires_hashdigest('md5')(Temp) Temp.__name__ = Temp.__qualname__ = newname Temp.__module__ = __module__ remote_globs[newname] = Temp diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 3b74949a5f61d..0ed75e6098a80 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -6,6 +6,7 @@ # Skip tests if sem_open implementation is broken. support.import_module('multiprocessing.synchronize') +from test.support import hashlib_helper from test.support.script_helper import assert_python_ok import contextlib @@ -953,6 +954,7 @@ def test_traceback(self): self.assertIn('raise RuntimeError(123) # some comment', f1.getvalue()) + @hashlib_helper.requires_hashdigest('md5') def test_ressources_gced_in_workers(self): # Ensure that argument for a job are correctly gc-ed after the job # is finished diff --git a/Misc/NEWS.d/next/Tests/2020-05-26-07-53-31.bpo-17258.X_IKTQ.rst b/Misc/NEWS.d/next/Tests/2020-05-26-07-53-31.bpo-17258.X_IKTQ.rst new file mode 100644 index 0000000000000..0a4b329b802e3 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-05-26-07-53-31.bpo-17258.X_IKTQ.rst @@ -0,0 +1 @@ +Skip some :mod:`multiprocessing` tests when MD5 hash digest is blocked. From webhook-mailer at python.org Thu Jun 4 09:19:10 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 13:19:10 -0000 Subject: [Python-checkins] bpo-40679: Fix _PyEval_EvalCode() crash if qualname is NULL (GH-20615) Message-ID: https://github.com/python/cpython/commit/232dda6cbc10860328a83517a6e3ea238ff4147f commit: 232dda6cbc10860328a83517a6e3ea238ff4147f branch: master author: Victor Stinner committer: GitHub date: 2020-06-04T15:19:02+02:00 summary: bpo-40679: Fix _PyEval_EvalCode() crash if qualname is NULL (GH-20615) If name is NULL, name is now set to co->co_name. If qualname is NULL, qualname is now set to name. qualname must not be NULL: it is used to build error messages. Cleanup also the code: declare variables where they are initialized. Rename "name" local variables to "varname" to avoid overriding "name" parameter. files: A Misc/NEWS.d/next/C API/2020-06-03-17-48-13.bpo-40679.3sgWma.rst M Python/ceval.c diff --git a/Misc/NEWS.d/next/C API/2020-06-03-17-48-13.bpo-40679.3sgWma.rst b/Misc/NEWS.d/next/C API/2020-06-03-17-48-13.bpo-40679.3sgWma.rst new file mode 100644 index 0000000000000..ccf908cef1914 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-03-17-48-13.bpo-40679.3sgWma.rst @@ -0,0 +1 @@ +Fix a ``_PyEval_EvalCode()`` crash if *qualname* argument is NULL. 
diff --git a/Python/ceval.c b/Python/ceval.c index 9ab8329d6d8e7..d1d0779318571 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -4107,14 +4107,22 @@ _PyEval_EvalCode(PyThreadState *tstate, { assert(is_tstate_valid(tstate)); - PyCodeObject* co = (PyCodeObject*)_co; - PyFrameObject *f; + PyCodeObject *co = (PyCodeObject*)_co; + + if (!name) { + name = co->co_name; + } + assert(name != NULL); + assert(PyUnicode_Check(name)); + + if (!qualname) { + qualname = name; + } + assert(qualname != NULL); + assert(PyUnicode_Check(qualname)); + PyObject *retval = NULL; - PyObject **fastlocals, **freevars; - PyObject *x, *u; const Py_ssize_t total_args = co->co_argcount + co->co_kwonlyargcount; - Py_ssize_t i, j, n; - PyObject *kwdict; if (globals == NULL) { _PyErr_SetString(tstate, PyExc_SystemError, @@ -4123,14 +4131,16 @@ _PyEval_EvalCode(PyThreadState *tstate, } /* Create the frame */ - f = _PyFrame_New_NoTrack(tstate, co, globals, locals); + PyFrameObject *f = _PyFrame_New_NoTrack(tstate, co, globals, locals); if (f == NULL) { return NULL; } - fastlocals = f->f_localsplus; - freevars = f->f_localsplus + co->co_nlocals; + PyObject **fastlocals = f->f_localsplus; + PyObject **freevars = f->f_localsplus + co->co_nlocals; /* Create a dictionary for keyword parameters (**kwags) */ + PyObject *kwdict; + Py_ssize_t i; if (co->co_flags & CO_VARKEYWORDS) { kwdict = PyDict_New(); if (kwdict == NULL) @@ -4146,6 +4156,7 @@ _PyEval_EvalCode(PyThreadState *tstate, } /* Copy all positional arguments into local variables */ + Py_ssize_t j, n; if (argcount > co->co_argcount) { n = co->co_argcount; } @@ -4153,14 +4164,14 @@ _PyEval_EvalCode(PyThreadState *tstate, n = argcount; } for (j = 0; j < n; j++) { - x = args[j]; + PyObject *x = args[j]; Py_INCREF(x); SETLOCAL(j, x); } /* Pack other positional arguments into the *args argument */ if (co->co_flags & CO_VARARGS) { - u = _PyTuple_FromArray(args + n, argcount - n); + PyObject *u = _PyTuple_FromArray(args + n, argcount - n); if (u == NULL) { goto fail; } @@ -4186,16 +4197,16 @@ _PyEval_EvalCode(PyThreadState *tstate, normally interned this should almost always hit. 
*/ co_varnames = ((PyTupleObject *)(co->co_varnames))->ob_item; for (j = co->co_posonlyargcount; j < total_args; j++) { - PyObject *name = co_varnames[j]; - if (name == keyword) { + PyObject *varname = co_varnames[j]; + if (varname == keyword) { goto kw_found; } } /* Slow fallback, just in case */ for (j = co->co_posonlyargcount; j < total_args; j++) { - PyObject *name = co_varnames[j]; - int cmp = PyObject_RichCompareBool( keyword, name, Py_EQ); + PyObject *varname = co_varnames[j]; + int cmp = PyObject_RichCompareBool( keyword, varname, Py_EQ); if (cmp > 0) { goto kw_found; } @@ -4209,7 +4220,8 @@ _PyEval_EvalCode(PyThreadState *tstate, if (co->co_posonlyargcount && positional_only_passed_as_keyword(tstate, co, - kwcount, kwnames, qualname)) + kwcount, kwnames, + qualname)) { goto fail; } @@ -4238,7 +4250,8 @@ _PyEval_EvalCode(PyThreadState *tstate, /* Check the number of positional arguments */ if ((argcount > co->co_argcount) && !(co->co_flags & CO_VARARGS)) { - too_many_positional(tstate, co, argcount, defcount, fastlocals, qualname); + too_many_positional(tstate, co, argcount, defcount, fastlocals, + qualname); goto fail; } @@ -4252,7 +4265,8 @@ _PyEval_EvalCode(PyThreadState *tstate, } } if (missing) { - missing_arguments(tstate, co, missing, defcount, fastlocals, qualname); + missing_arguments(tstate, co, missing, defcount, fastlocals, + qualname); goto fail; } if (n > m) @@ -4272,12 +4286,11 @@ _PyEval_EvalCode(PyThreadState *tstate, if (co->co_kwonlyargcount > 0) { Py_ssize_t missing = 0; for (i = co->co_argcount; i < total_args; i++) { - PyObject *name; if (GETLOCAL(i) != NULL) continue; - name = PyTuple_GET_ITEM(co->co_varnames, i); + PyObject *varname = PyTuple_GET_ITEM(co->co_varnames, i); if (kwdefs != NULL) { - PyObject *def = PyDict_GetItemWithError(kwdefs, name); + PyObject *def = PyDict_GetItemWithError(kwdefs, varname); if (def) { Py_INCREF(def); SETLOCAL(i, def); @@ -4290,7 +4303,8 @@ _PyEval_EvalCode(PyThreadState *tstate, missing++; } if (missing) { - missing_arguments(tstate, co, missing, -1, fastlocals, qualname); + missing_arguments(tstate, co, missing, -1, fastlocals, + qualname); goto fail; } } From webhook-mailer at python.org Thu Jun 4 15:58:19 2020 From: webhook-mailer at python.org (Harsha Laxman) Date: Thu, 04 Jun 2020 19:58:19 -0000 Subject: [Python-checkins] Fix spacing in docs for tarfile (GH-20629) Message-ID: https://github.com/python/cpython/commit/7a280197f4162e5fcdde6f34701a9fa6e669190d commit: 7a280197f4162e5fcdde6f34701a9fa6e669190d branch: master author: Harsha Laxman committer: GitHub date: 2020-06-04T12:58:10-07:00 summary: Fix spacing in docs for tarfile (GH-20629) Before ``` content.txt is 42 bytes in size and isa regular file. folder is 420 bytes in size and isa directory. magic is 4200 bytes in size and issomething else. ``` After: ``` content.txt is 42 bytes in size and is a regular file. folder is 420 bytes in size and is a directory. magic is 4200 bytes in size and is something else. 
``` Automerge-Triggered-By: @orsenthil files: M Doc/library/tarfile.rst diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 459e4ad991d9d..c204263d3a094 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -787,7 +787,7 @@ How to read a gzip compressed tar archive and display some member information:: import tarfile tar = tarfile.open("sample.tar.gz", "r:gz") for tarinfo in tar: - print(tarinfo.name, "is", tarinfo.size, "bytes in size and is", end="") + print(tarinfo.name, "is", tarinfo.size, "bytes in size and is ", end="") if tarinfo.isreg(): print("a regular file.") elif tarinfo.isdir(): From webhook-mailer at python.org Thu Jun 4 16:08:48 2020 From: webhook-mailer at python.org (Erlend Egeberg Aasland) Date: Thu, 04 Jun 2020 20:08:48 -0000 Subject: [Python-checkins] bpo-40865: Remove unused insint() macro from hash modules (GH-20627) Message-ID: https://github.com/python/cpython/commit/6ed578f6dbffdec94f62cc2e36d626fc195678d7 commit: 6ed578f6dbffdec94f62cc2e36d626fc195678d7 branch: master author: Erlend Egeberg Aasland committer: GitHub date: 2020-06-04T13:08:42-07:00 summary: bpo-40865: Remove unused insint() macro from hash modules (GH-20627) Automerge-Triggered-By: @tiran files: M Modules/md5module.c M Modules/sha1module.c M Modules/sha256module.c M Modules/sha512module.c diff --git a/Modules/md5module.c b/Modules/md5module.c index ea2bafb9b65e8..e4d9db40f22df 100644 --- a/Modules/md5module.c +++ b/Modules/md5module.c @@ -552,9 +552,6 @@ static struct PyMethodDef MD5_functions[] = { /* Initialize this module. */ -#define insint(n,v) { PyModule_AddIntConstant(m,n,v); } - - static struct PyModuleDef _md5module = { PyModuleDef_HEAD_INIT, "_md5", diff --git a/Modules/sha1module.c b/Modules/sha1module.c index e066b88022941..b0656d83b3ae8 100644 --- a/Modules/sha1module.c +++ b/Modules/sha1module.c @@ -529,9 +529,6 @@ static struct PyMethodDef SHA1_functions[] = { /* Initialize this module. */ -#define insint(n,v) { PyModule_AddIntConstant(m,n,v); } - - static struct PyModuleDef _sha1module = { PyModuleDef_HEAD_INIT, "_sha1", diff --git a/Modules/sha256module.c b/Modules/sha256module.c index e0ff9b2b3a187..8edb1d5382883 100644 --- a/Modules/sha256module.c +++ b/Modules/sha256module.c @@ -684,9 +684,6 @@ static struct PyMethodDef SHA_functions[] = { /* Initialize this module. */ -#define insint(n,v) { PyModule_AddIntConstant(m,n,v); } - - static struct PyModuleDef _sha256module = { PyModuleDef_HEAD_INIT, "_sha256", diff --git a/Modules/sha512module.c b/Modules/sha512module.c index 780f8e7f06c9e..561ef8ef0e867 100644 --- a/Modules/sha512module.c +++ b/Modules/sha512module.c @@ -741,9 +741,6 @@ static struct PyMethodDef SHA_functions[] = { /* Initialize this module. 
*/ -#define insint(n,v) { PyModule_AddIntConstant(m,n,v); } - - static struct PyModuleDef _sha512module = { PyModuleDef_HEAD_INIT, "_sha512", From webhook-mailer at python.org Thu Jun 4 16:10:47 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 20:10:47 -0000 Subject: [Python-checkins] bpo-39573: Porting to Python 3.10: Py_SET_SIZE() macro (GH-20610) Message-ID: https://github.com/python/cpython/commit/dc24b8a2ac32114313bae519db3ccc21fe45c982 commit: dc24b8a2ac32114313bae519db3ccc21fe45c982 branch: master author: Victor Stinner committer: GitHub date: 2020-06-04T22:10:43+02:00 summary: bpo-39573: Porting to Python 3.10: Py_SET_SIZE() macro (GH-20610) In What's New in Python 3.10, propose Py_SET_SIZE(), Py_SET_REFCNT() and Py_SET_TYPE() macros for backward compatibility with Python 3.9 and older. files: M Doc/whatsnew/3.10.rst diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 0b656475b7167..1234b2e6bbf27 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -135,17 +135,35 @@ Porting to Python 3.10 * Since :c:func:`Py_TYPE()` is changed to the inline static function, ``Py_TYPE(obj) = new_type`` must be replaced with ``Py_SET_TYPE(obj, new_type)``: - see :c:func:`Py_SET_TYPE()` (available since Python 3.9). + see :c:func:`Py_SET_TYPE()` (available since Python 3.9). For backward + compatibility, this macro can be used:: + + #if PY_VERSION_HEX < 0x030900A4 + # define Py_SET_TYPE(obj, type) ((Py_TYPE(obj) = (type)), (void)0) + #endif + (Contributed by Dong-hee Na in :issue:`39573`.) * Since :c:func:`Py_REFCNT()` is changed to the inline static function, ``Py_REFCNT(obj) = new_refcnt`` must be replaced with ``Py_SET_REFCNT(obj, new_refcnt)``: - see :c:func:`Py_SET_REFCNT()` (available since Python 3.9). + see :c:func:`Py_SET_REFCNT()` (available since Python 3.9). For backward + compatibility, this macro can be used:: + + #if PY_VERSION_HEX < 0x030900A4 + # define Py_SET_REFCNT(obj, refcnt) ((Py_REFCNT(obj) = (refcnt)), (void)0) + #endif + (Contributed by Victor Stinner in :issue:`39573`.) * Since :c:func:`Py_SIZE()` is changed to the inline static function, ``Py_SIZE(obj) = new_size`` must be replaced with ``Py_SET_SIZE(obj, new_size)``: - see :c:func:`Py_SET_SIZE()` (available since Python 3.9). + see :c:func:`Py_SET_SIZE()` (available since Python 3.9). For backward + compatibility, this macro can be used:: + + #if PY_VERSION_HEX < 0x030900A4 + # define Py_SET_SIZE(obj, size) ((Py_SIZE(obj) = (size)), (void)0) + #endif + (Contributed by Victor Stinner in :issue:`39573`.) * Calling :c:func:`PyDict_GetItem` without :term:`GIL` held had been allowed From webhook-mailer at python.org Thu Jun 4 16:19:51 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 04 Jun 2020 20:19:51 -0000 Subject: [Python-checkins] Fix spacing in docs for tarfile (GH-20629) Message-ID: https://github.com/python/cpython/commit/c935b33322843b3abfd930cd24b8806f3923f1cc commit: c935b33322843b3abfd930cd24b8806f3923f1cc branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-04T13:19:47-07:00 summary: Fix spacing in docs for tarfile (GH-20629) Before ``` content.txt is 42 bytes in size and isa regular file. folder is 420 bytes in size and isa directory. magic is 4200 bytes in size and issomething else. ``` After: ``` content.txt is 42 bytes in size and is a regular file. folder is 420 bytes in size and is a directory. magic is 4200 bytes in size and is something else. 
``` Automerge-Triggered-By: @orsenthil (cherry picked from commit 7a280197f4162e5fcdde6f34701a9fa6e669190d) Co-authored-by: Harsha Laxman files: M Doc/library/tarfile.rst diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index c34f2c4a57024..d60f1c8a5f2d8 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -784,7 +784,7 @@ How to read a gzip compressed tar archive and display some member information:: import tarfile tar = tarfile.open("sample.tar.gz", "r:gz") for tarinfo in tar: - print(tarinfo.name, "is", tarinfo.size, "bytes in size and is", end="") + print(tarinfo.name, "is", tarinfo.size, "bytes in size and is ", end="") if tarinfo.isreg(): print("a regular file.") elif tarinfo.isdir(): From webhook-mailer at python.org Thu Jun 4 16:19:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 04 Jun 2020 20:19:56 -0000 Subject: [Python-checkins] Fix spacing in docs for tarfile (GH-20629) Message-ID: https://github.com/python/cpython/commit/6bfbe773bd84323a0894428db48a4d190b2909b9 commit: 6bfbe773bd84323a0894428db48a4d190b2909b9 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-04T13:19:51-07:00 summary: Fix spacing in docs for tarfile (GH-20629) Before ``` content.txt is 42 bytes in size and isa regular file. folder is 420 bytes in size and isa directory. magic is 4200 bytes in size and issomething else. ``` After: ``` content.txt is 42 bytes in size and is a regular file. folder is 420 bytes in size and is a directory. magic is 4200 bytes in size and is something else. ``` Automerge-Triggered-By: @orsenthil (cherry picked from commit 7a280197f4162e5fcdde6f34701a9fa6e669190d) Co-authored-by: Harsha Laxman files: M Doc/library/tarfile.rst diff --git a/Doc/library/tarfile.rst b/Doc/library/tarfile.rst index 9cd07158e7f62..aa28441658973 100644 --- a/Doc/library/tarfile.rst +++ b/Doc/library/tarfile.rst @@ -779,7 +779,7 @@ How to read a gzip compressed tar archive and display some member information:: import tarfile tar = tarfile.open("sample.tar.gz", "r:gz") for tarinfo in tar: - print(tarinfo.name, "is", tarinfo.size, "bytes in size and is", end="") + print(tarinfo.name, "is", tarinfo.size, "bytes in size and is ", end="") if tarinfo.isreg(): print("a regular file.") elif tarinfo.isdir(): From webhook-mailer at python.org Thu Jun 4 17:38:44 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 21:38:44 -0000 Subject: [Python-checkins] bpo-40521: Make tuple free list per-interpreter (GH-20247) Message-ID: https://github.com/python/cpython/commit/69ac6e58fd98de339c013fe64cd1cf763e4f9bca commit: 69ac6e58fd98de339c013fe64cd1cf763e4f9bca branch: master author: Victor Stinner committer: GitHub date: 2020-06-04T23:38:36+02:00 summary: bpo-40521: Make tuple free list per-interpreter (GH-20247) Each interpreter now has its own tuple free lists: * Move tuple numfree and free_list arrays into PyInterpreterState. * Define PyTuple_MAXSAVESIZE and PyTuple_MAXFREELIST macros in pycore_interp.h. * Add _Py_tuple_state structure. Pass it explicitly to tuple_alloc(). * Add tstate parameter to _PyTuple_ClearFreeList() * Each interpreter now has its own empty tuple singleton. 
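A rough Python-level illustration of what the tuple free list and the empty-tuple singleton provide inside a single interpreter follows (a sketch, not part of the commit; object-address reuse is a CPython implementation detail and is never guaranteed):

```
# Within one interpreter the empty tuple is a singleton, and a freed small
# tuple can be handed back out by the free list for its size.
a = ()
b = ()
print(a is b)           # True: one empty-tuple singleton per interpreter

t = tuple(range(3))     # built at runtime, so a fresh 3-element tuple
old_id = id(t)
del t                   # the object is parked on the size-3 free list
u = tuple("abc")        # another runtime-built 3-element tuple
print(id(u) == old_id)  # often True on CPython, but never guaranteed
```

After this change each sub-interpreter keeps its own copy of that state instead of sharing process-wide C statics.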
files: A Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Modules/gcmodule.c M Objects/tupleobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 0511eea779a7e..e8e5d32977095 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -166,7 +166,7 @@ PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *); // Functions to clear types free lists extern void _PyFrame_ClearFreeList(void); -extern void _PyTuple_ClearFreeList(void); +extern void _PyTuple_ClearFreeList(PyThreadState *tstate); extern void _PyFloat_ClearFreeList(void); extern void _PyList_ClearFreeList(void); extern void _PyDict_ClearFreeList(void); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index f04ea330d0457..b90bfbe797b58 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -64,6 +64,26 @@ struct _Py_unicode_state { struct _Py_unicode_fs_codec fs_codec; }; +/* Speed optimization to avoid frequent malloc/free of small tuples */ +#ifndef PyTuple_MAXSAVESIZE + // Largest tuple to save on free list +# define PyTuple_MAXSAVESIZE 20 +#endif +#ifndef PyTuple_MAXFREELIST + // Maximum number of tuples of each size to save +# define PyTuple_MAXFREELIST 2000 +#endif + +struct _Py_tuple_state { +#if PyTuple_MAXSAVESIZE > 0 + /* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, + entry 0 is the empty tuple () of which at most one instance + will be allocated. */ + PyTupleObject *free_list[PyTuple_MAXSAVESIZE]; + int numfree[PyTuple_MAXSAVESIZE]; +#endif +}; + /* interpreter state */ @@ -157,6 +177,7 @@ struct _is { */ PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS]; #endif + struct _Py_tuple_state tuple; }; /* Used by _PyImport_Cleanup() */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 77ea3f27454da..3f2ff5bfd2410 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -60,7 +60,7 @@ extern PyStatus _PyGC_Init(PyThreadState *tstate); extern void _PyFrame_Fini(void); extern void _PyDict_Fini(void); -extern void _PyTuple_Fini(void); +extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(void); extern void _PySet_Fini(void); extern void _PyBytes_Fini(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst new file mode 100644 index 0000000000000..f364d36a135f2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -0,0 +1 @@ +Each interpreter now has its own tuple free lists and empty tuple singleton. 
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index a44752b1cc4da..1f5aa936e41c7 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1025,8 +1025,9 @@ delete_garbage(PyThreadState *tstate, GCState *gcstate, static void clear_freelists(void) { + PyThreadState *tstate = _PyThreadState_GET(); _PyFrame_ClearFreeList(); - _PyTuple_ClearFreeList(); + _PyTuple_ClearFreeList(tstate); _PyFloat_ClearFreeList(); _PyList_ClearFreeList(); _PyDict_ClearFreeList(); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 43706c22b9291..951cd1faf7e8f 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -14,28 +14,6 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #include "clinic/tupleobject.c.h" -/* Speed optimization to avoid frequent malloc/free of small tuples */ -#ifndef PyTuple_MAXSAVESIZE -#define PyTuple_MAXSAVESIZE 20 /* Largest tuple to save on free list */ -#endif -#ifndef PyTuple_MAXFREELIST -#define PyTuple_MAXFREELIST 2000 /* Maximum number of tuples of each size to save */ -#endif - -/* bpo-40521: tuple free lists are shared by all interpreters. */ -#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS -# undef PyTuple_MAXSAVESIZE -# define PyTuple_MAXSAVESIZE 0 -#endif - -#if PyTuple_MAXSAVESIZE > 0 -/* Entries 1 up to PyTuple_MAXSAVESIZE are free lists, entry 0 is the empty - tuple () of which at most one instance will be allocated. -*/ -static PyTupleObject *free_list[PyTuple_MAXSAVESIZE]; -static int numfree[PyTuple_MAXSAVESIZE]; -#endif - static inline void tuple_gc_track(PyTupleObject *op) { @@ -47,14 +25,14 @@ void _PyTuple_DebugMallocStats(FILE *out) { #if PyTuple_MAXSAVESIZE > 0 - int i; - char buf[128]; - for (i = 1; i < PyTuple_MAXSAVESIZE; i++) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + for (int i = 1; i < PyTuple_MAXSAVESIZE; i++) { + char buf[128]; PyOS_snprintf(buf, sizeof(buf), "free %d-sized PyTupleObject", i); - _PyDebugAllocatorStats(out, - buf, - numfree[i], _PyObject_VAR_SIZE(&PyTuple_Type, i)); + _PyDebugAllocatorStats(out, buf, state->numfree[i], + _PyObject_VAR_SIZE(&PyTuple_Type, i)); } #endif } @@ -68,7 +46,7 @@ _PyTuple_DebugMallocStats(FILE *out) which wraps this function). 
*/ static PyTupleObject * -tuple_alloc(Py_ssize_t size) +tuple_alloc(struct _Py_tuple_state *state, Py_ssize_t size) { PyTupleObject *op; if (size < 0) { @@ -76,10 +54,10 @@ tuple_alloc(Py_ssize_t size) return NULL; } #if PyTuple_MAXSAVESIZE > 0 - if (size < PyTuple_MAXSAVESIZE && (op = free_list[size]) != NULL) { + if (size < PyTuple_MAXSAVESIZE && (op = state->free_list[size]) != NULL) { assert(size != 0); - free_list[size] = (PyTupleObject *) op->ob_item[0]; - numfree[size]--; + state->free_list[size] = (PyTupleObject *) op->ob_item[0]; + state->numfree[size]--; /* Inline PyObject_InitVar */ #ifdef Py_TRACE_REFS Py_SET_SIZE(op, size); @@ -107,13 +85,15 @@ PyTuple_New(Py_ssize_t size) { PyTupleObject *op; #if PyTuple_MAXSAVESIZE > 0 - if (size == 0 && free_list[0]) { - op = free_list[0]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + if (size == 0 && state->free_list[0]) { + op = state->free_list[0]; Py_INCREF(op); return (PyObject *) op; } #endif - op = tuple_alloc(size); + op = tuple_alloc(state, size); if (op == NULL) { return NULL; } @@ -122,8 +102,8 @@ PyTuple_New(Py_ssize_t size) } #if PyTuple_MAXSAVESIZE > 0 if (size == 0) { - free_list[0] = op; - ++numfree[0]; + state->free_list[0] = op; + ++state->numfree[0]; Py_INCREF(op); /* extra INCREF so that this is never freed */ } #endif @@ -210,8 +190,11 @@ PyTuple_Pack(Py_ssize_t n, ...) return PyTuple_New(0); } + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + va_start(vargs, n); - PyTupleObject *result = tuple_alloc(n); + PyTupleObject *result = tuple_alloc(state, n); if (result == NULL) { va_end(vargs); return NULL; @@ -233,22 +216,24 @@ PyTuple_Pack(Py_ssize_t n, ...) static void tupledealloc(PyTupleObject *op) { - Py_ssize_t i; Py_ssize_t len = Py_SIZE(op); PyObject_GC_UnTrack(op); Py_TRASHCAN_BEGIN(op, tupledealloc) if (len > 0) { - i = len; - while (--i >= 0) + Py_ssize_t i = len; + while (--i >= 0) { Py_XDECREF(op->ob_item[i]); + } #if PyTuple_MAXSAVESIZE > 0 + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; if (len < PyTuple_MAXSAVESIZE && - numfree[len] < PyTuple_MAXFREELIST && + state->numfree[len] < PyTuple_MAXFREELIST && Py_IS_TYPE(op, &PyTuple_Type)) { - op->ob_item[0] = (PyObject *) free_list[len]; - numfree[len]++; - free_list[len] = op; + op->ob_item[0] = (PyObject *) state->free_list[len]; + state->numfree[len]++; + state->free_list[len] = op; goto done; /* return */ } #endif @@ -423,7 +408,9 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) return PyTuple_New(0); } - PyTupleObject *tuple = tuple_alloc(n); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + PyTupleObject *tuple = tuple_alloc(state, n); if (tuple == NULL) { return NULL; } @@ -481,7 +468,8 @@ tupleconcat(PyTupleObject *a, PyObject *bb) Py_TYPE(bb)->tp_name); return NULL; } -#define b ((PyTupleObject *)bb) + PyTupleObject *b = (PyTupleObject *)bb; + if (Py_SIZE(b) == 0 && PyTuple_CheckExact(a)) { Py_INCREF(a); return (PyObject *)a; @@ -492,7 +480,9 @@ tupleconcat(PyTupleObject *a, PyObject *bb) return PyTuple_New(0); } - np = tuple_alloc(size); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + np = tuple_alloc(state, size); if (np == NULL) { return NULL; } @@ -512,7 +502,6 @@ tupleconcat(PyTupleObject *a, PyObject *bb) } tuple_gc_track(np); return 
(PyObject *)np; -#undef b } static PyObject * @@ -536,7 +525,9 @@ tuplerepeat(PyTupleObject *a, Py_ssize_t n) if (n > PY_SSIZE_T_MAX / Py_SIZE(a)) return PyErr_NoMemory(); size = Py_SIZE(a) * n; - np = tuple_alloc(size); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + np = tuple_alloc(state, size); if (np == NULL) return NULL; p = np->ob_item; @@ -801,7 +792,9 @@ tuplesubscript(PyTupleObject* self, PyObject* item) return (PyObject *)self; } else { - PyTupleObject* result = tuple_alloc(slicelength); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_tuple_state *state = &interp->tuple; + PyTupleObject* result = tuple_alloc(state, slicelength); if (!result) return NULL; src = self->ob_item; @@ -963,13 +956,14 @@ _PyTuple_Resize(PyObject **pv, Py_ssize_t newsize) } void -_PyTuple_ClearFreeList(void) +_PyTuple_ClearFreeList(PyThreadState *tstate) { #if PyTuple_MAXSAVESIZE > 0 + struct _Py_tuple_state *state = &tstate->interp->tuple; for (Py_ssize_t i = 1; i < PyTuple_MAXSAVESIZE; i++) { - PyTupleObject *p = free_list[i]; - free_list[i] = NULL; - numfree[i] = 0; + PyTupleObject *p = state->free_list[i]; + state->free_list[i] = NULL; + state->numfree[i] = 0; while (p) { PyTupleObject *q = p; p = (PyTupleObject *)(p->ob_item[0]); @@ -981,14 +975,15 @@ _PyTuple_ClearFreeList(void) } void -_PyTuple_Fini(void) +_PyTuple_Fini(PyThreadState *tstate) { #if PyTuple_MAXSAVESIZE > 0 + struct _Py_tuple_state *state = &tstate->interp->tuple; /* empty tuples are used all over the place and applications may * rely on the fact that an empty tuple is a singleton. */ - Py_CLEAR(free_list[0]); + Py_CLEAR(state->free_list[0]); - _PyTuple_ClearFreeList(); + _PyTuple_ClearFreeList(tstate); #endif } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index da66a82ada70a..9da3fb09c38ba 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1252,7 +1252,9 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) if (is_main_interp) { /* Sundry finalizers */ _PyFrame_Fini(); - _PyTuple_Fini(); + } + _PyTuple_Fini(tstate); + if (is_main_interp) { _PyList_Fini(); _PySet_Fini(); _PyBytes_Fini(); From webhook-mailer at python.org Thu Jun 4 18:50:20 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 22:50:20 -0000 Subject: [Python-checkins] bpo-40521: Make float free list per-interpreter (GH-20636) Message-ID: https://github.com/python/cpython/commit/2ba59370c3dda2ac229c14510e53a05074b133d1 commit: 2ba59370c3dda2ac229c14510e53a05074b133d1 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T00:50:05+02:00 summary: bpo-40521: Make float free list per-interpreter (GH-20636) Each interpreter now has its own float free list: * Move tuple numfree and free_list into PyInterpreterState. * Add _Py_float_state structure. * Add tstate parameter to _PyFloat_ClearFreeList() and _PyFloat_Fini(). 
files: M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Objects/floatobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index e8e5d32977095..f90d80be16878 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -167,7 +167,7 @@ PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *); // Functions to clear types free lists extern void _PyFrame_ClearFreeList(void); extern void _PyTuple_ClearFreeList(PyThreadState *tstate); -extern void _PyFloat_ClearFreeList(void); +extern void _PyFloat_ClearFreeList(PyThreadState *tstate); extern void _PyList_ClearFreeList(void); extern void _PyDict_ClearFreeList(void); extern void _PyAsyncGen_ClearFreeLists(void); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index b90bfbe797b58..c0eed00f36581 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -84,6 +84,14 @@ struct _Py_tuple_state { #endif }; +struct _Py_float_state { + /* Special free list + free_list is a singly-linked list of available PyFloatObjects, + linked via abuse of their ob_type members. */ + int numfree; + PyFloatObject *free_list; +}; + /* interpreter state */ @@ -178,6 +186,7 @@ struct _is { PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS]; #endif struct _Py_tuple_state tuple; + struct _Py_float_state float_state; }; /* Used by _PyImport_Cleanup() */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 3f2ff5bfd2410..2643abca0f553 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -64,7 +64,7 @@ extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(void); extern void _PySet_Fini(void); extern void _PyBytes_Fini(void); -extern void _PyFloat_Fini(void); +extern void _PyFloat_Fini(PyThreadState *tstate); extern void _PySlice_Fini(void); extern void _PyAsyncGen_Fini(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index f364d36a135f2..016f11668ee44 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1 +1,2 @@ -Each interpreter now has its own tuple free lists and empty tuple singleton. +Tuple free lists, empty tuple singleton, and float free list are no longer +shared by all interpreters: each interpreter now its own free lists. 
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 1f5aa936e41c7..0bad0f8917f37 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1028,7 +1028,7 @@ clear_freelists(void) PyThreadState *tstate = _PyThreadState_GET(); _PyFrame_ClearFreeList(); _PyTuple_ClearFreeList(tstate); - _PyFloat_ClearFreeList(); + _PyFloat_ClearFreeList(tstate); _PyList_ClearFreeList(); _PyDict_ClearFreeList(); _PyAsyncGen_ClearFreeLists(); diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 868b7298a9e8d..d72fd21f95faf 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -5,6 +5,8 @@ #include "Python.h" #include "pycore_dtoa.h" +#include "pycore_interp.h" // _PyInterpreterState.float_state +#include "pycore_pystate.h" // _PyInterpreterState_GET() #include #include @@ -16,16 +18,9 @@ class float "PyObject *" "&PyFloat_Type" #include "clinic/floatobject.c.h" -/* Special free list - free_list is a singly-linked list of available PyFloatObjects, linked - via abuse of their ob_type members. -*/ - #ifndef PyFloat_MAXFREELIST -#define PyFloat_MAXFREELIST 100 +# define PyFloat_MAXFREELIST 100 #endif -static int numfree = 0; -static PyFloatObject *free_list = NULL; double PyFloat_GetMax(void) @@ -117,16 +112,19 @@ PyFloat_GetInfo(void) PyObject * PyFloat_FromDouble(double fval) { - PyFloatObject *op = free_list; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_float_state *state = &interp->float_state; + PyFloatObject *op = state->free_list; if (op != NULL) { - free_list = (PyFloatObject *) Py_TYPE(op); - numfree--; - } else { - op = (PyFloatObject*) PyObject_MALLOC(sizeof(PyFloatObject)); - if (!op) + state->free_list = (PyFloatObject *) Py_TYPE(op); + state->numfree--; + } + else { + op = PyObject_Malloc(sizeof(PyFloatObject)); + if (!op) { return PyErr_NoMemory(); + } } - /* Inline PyObject_New */ (void)PyObject_INIT(op, &PyFloat_Type); op->ob_fval = fval; return (PyObject *) op; @@ -219,13 +217,15 @@ static void float_dealloc(PyFloatObject *op) { if (PyFloat_CheckExact(op)) { - if (numfree >= PyFloat_MAXFREELIST) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_float_state *state = &interp->float_state; + if (state->numfree >= PyFloat_MAXFREELIST) { PyObject_FREE(op); return; } - numfree++; - Py_SET_TYPE(op, (PyTypeObject *)free_list); - free_list = op; + state->numfree++; + Py_SET_TYPE(op, (PyTypeObject *)state->free_list); + state->free_list = op; } else Py_TYPE(op)->tp_free((PyObject *)op); @@ -1981,30 +1981,33 @@ _PyFloat_Init(void) } void -_PyFloat_ClearFreeList(void) +_PyFloat_ClearFreeList(PyThreadState *tstate) { - PyFloatObject *f = free_list, *next; + struct _Py_float_state *state = &tstate->interp->float_state; + PyFloatObject *f = state->free_list, *next; for (; f; f = next) { next = (PyFloatObject*) Py_TYPE(f); PyObject_FREE(f); } - free_list = NULL; - numfree = 0; + state->free_list = NULL; + state->numfree = 0; } void -_PyFloat_Fini(void) +_PyFloat_Fini(PyThreadState *tstate) { - _PyFloat_ClearFreeList(); + _PyFloat_ClearFreeList(tstate); } /* Print summary info about the state of the optimized allocator */ void _PyFloat_DebugMallocStats(FILE *out) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_float_state *state = &interp->float_state; _PyDebugAllocatorStats(out, "free PyFloatObject", - numfree, sizeof(PyFloatObject)); + state->numfree, sizeof(PyFloatObject)); } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 9da3fb09c38ba..716303cffc764 100644 --- a/Python/pylifecycle.c +++ 
b/Python/pylifecycle.c @@ -1261,9 +1261,9 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) } _PyLong_Fini(tstate); + _PyFloat_Fini(tstate); if (is_main_interp) { - _PyFloat_Fini(); _PyDict_Fini(); _PySlice_Fini(); } From webhook-mailer at python.org Thu Jun 4 19:14:48 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 23:14:48 -0000 Subject: [Python-checkins] bpo-40521: Make slice cache per-interpreter (GH-20637) Message-ID: https://github.com/python/cpython/commit/7daba6f221e713f7f60c613b246459b07d179f91 commit: 7daba6f221e713f7f60c613b246459b07d179f91 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T01:14:40+02:00 summary: bpo-40521: Make slice cache per-interpreter (GH-20637) Each interpreter now has its own slice cache: * Move slice cache into PyInterpreterState. * Add tstate parameter to _PySlice_Fini(). files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/sliceobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index c0eed00f36581..70054efe7ec71 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -187,6 +187,10 @@ struct _is { #endif struct _Py_tuple_state tuple; struct _Py_float_state float_state; + + /* Using a cache is very effective since typically only a single slice is + created and then deleted again. */ + PySliceObject *slice_cache; }; /* Used by _PyImport_Cleanup() */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 2643abca0f553..bba9bd9b2bdb2 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -65,7 +65,7 @@ extern void _PyList_Fini(void); extern void _PySet_Fini(void); extern void _PyBytes_Fini(void); extern void _PyFloat_Fini(PyThreadState *tstate); -extern void _PySlice_Fini(void); +extern void _PySlice_Fini(PyThreadState *tstate); extern void _PyAsyncGen_Fini(void); extern void PyOS_FiniInterrupts(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 016f11668ee44..74c7a499bdef0 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,2 +1,3 @@ -Tuple free lists, empty tuple singleton, and float free list are no longer -shared by all interpreters: each interpreter now its own free lists. +The tuple free lists, the empty tuple singleton, the float free list, and the +slice cache are no longer shared by all interpreters: each interpreter now has +its own free lists and caches. diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index 391711f711aae..f97a570a787f0 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -15,7 +15,7 @@ this type and there is exactly one in existence. 
#include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_object.h" +#include "pycore_object.h" // _PyObject_GC_TRACK() #include "structmember.h" // PyMemberDef static PyObject * @@ -95,16 +95,13 @@ PyObject _Py_EllipsisObject = { /* Slice object implementation */ -/* Using a cache is very effective since typically only a single slice is - * created and then deleted again - */ -static PySliceObject *slice_cache = NULL; -void _PySlice_Fini(void) +void _PySlice_Fini(PyThreadState *tstate) { - PySliceObject *obj = slice_cache; + PyInterpreterState *interp = tstate->interp; + PySliceObject *obj = interp->slice_cache; if (obj != NULL) { - slice_cache = NULL; + interp->slice_cache = NULL; PyObject_GC_Del(obj); } } @@ -116,10 +113,11 @@ void _PySlice_Fini(void) PyObject * PySlice_New(PyObject *start, PyObject *stop, PyObject *step) { + PyInterpreterState *interp = _PyInterpreterState_GET(); PySliceObject *obj; - if (slice_cache != NULL) { - obj = slice_cache; - slice_cache = NULL; + if (interp->slice_cache != NULL) { + obj = interp->slice_cache; + interp->slice_cache = NULL; _Py_NewReference((PyObject *)obj); } else { obj = PyObject_GC_New(PySliceObject, &PySlice_Type); @@ -324,14 +322,17 @@ Create a slice object. This is used for extended slicing (e.g. a[0:10:2])."); static void slice_dealloc(PySliceObject *r) { + PyInterpreterState *interp = _PyInterpreterState_GET(); _PyObject_GC_UNTRACK(r); Py_DECREF(r->step); Py_DECREF(r->start); Py_DECREF(r->stop); - if (slice_cache == NULL) - slice_cache = r; - else + if (interp->slice_cache == NULL) { + interp->slice_cache = r; + } + else { PyObject_GC_Del(r); + } } static PyObject * diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 716303cffc764..ee9d698d7d089 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1265,9 +1265,9 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) if (is_main_interp) { _PyDict_Fini(); - _PySlice_Fini(); } + _PySlice_Fini(tstate); _PyWarnings_Fini(tstate->interp); if (is_main_interp) { From webhook-mailer at python.org Thu Jun 4 19:39:29 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 04 Jun 2020 23:39:29 -0000 Subject: [Python-checkins] bpo-40521: Make frame free list per-interpreter (GH-20638) Message-ID: https://github.com/python/cpython/commit/3744ed2c9c0b3905947602fc375de49533790cb9 commit: 3744ed2c9c0b3905947602fc375de49533790cb9 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T01:39:24+02:00 summary: bpo-40521: Make frame free list per-interpreter (GH-20638) Each interpreter now has its own frame free list: * Move frame free list into PyInterpreterState. * Add _Py_frame_state structure. * Add tstate parameter to _PyFrame_ClearFreeList() and _PyFrame_Fini(). * Remove "#if PyFrame_MAXFREELIST > 0". * Remove "#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS". 
files: M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Objects/frameobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index f90d80be16878..01265d3f985b9 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -165,7 +165,7 @@ PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *); // Functions to clear types free lists -extern void _PyFrame_ClearFreeList(void); +extern void _PyFrame_ClearFreeList(PyThreadState *tstate); extern void _PyTuple_ClearFreeList(PyThreadState *tstate); extern void _PyFloat_ClearFreeList(PyThreadState *tstate); extern void _PyList_ClearFreeList(void); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 70054efe7ec71..9b805f004eaa6 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -92,6 +92,12 @@ struct _Py_float_state { PyFloatObject *free_list; }; +struct _Py_frame_state { + PyFrameObject *free_list; + /* number of frames currently in free_list */ + int numfree; +}; + /* interpreter state */ @@ -187,6 +193,7 @@ struct _is { #endif struct _Py_tuple_state tuple; struct _Py_float_state float_state; + struct _Py_frame_state frame; /* Using a cache is very effective since typically only a single slice is created and then deleted again. */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index bba9bd9b2bdb2..06d2ac167d619 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -58,7 +58,7 @@ extern PyStatus _PyGC_Init(PyThreadState *tstate); /* Various internal finalizers */ -extern void _PyFrame_Fini(void); +extern void _PyFrame_Fini(PyThreadState *tstate); extern void _PyDict_Fini(void); extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 74c7a499bdef0..71a1064ba7d14 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,3 +1,3 @@ -The tuple free lists, the empty tuple singleton, the float free list, and the -slice cache are no longer shared by all interpreters: each interpreter now has -its own free lists and caches. +The tuple free lists, the empty tuple singleton, the float free list, the slice +cache, and the frame free list are no longer shared by all interpreters: each +interpreter now its has own free lists and caches. diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 0bad0f8917f37..45dc89d08c1fb 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1026,7 +1026,7 @@ static void clear_freelists(void) { PyThreadState *tstate = _PyThreadState_GET(); - _PyFrame_ClearFreeList(); + _PyFrame_ClearFreeList(tstate); _PyTuple_ClearFreeList(tstate); _PyFloat_ClearFreeList(tstate); _PyList_ClearFreeList(); diff --git a/Objects/frameobject.c b/Objects/frameobject.c index b6d073bd456d0..0fe9f2a6666b2 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -561,36 +561,25 @@ static PyGetSetDef frame_getsetlist[] = { /* max value for numfree */ #define PyFrame_MAXFREELIST 200 -/* bpo-40521: frame free lists are shared by all interpreters. 
*/ -#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS -# undef PyFrame_MAXFREELIST -# define PyFrame_MAXFREELIST 0 -#endif - -#if PyFrame_MAXFREELIST > 0 -static PyFrameObject *free_list = NULL; -static int numfree = 0; /* number of frames currently in free_list */ -#endif - static void _Py_HOT_FUNCTION frame_dealloc(PyFrameObject *f) { - PyObject **p, **valuestack; - PyCodeObject *co; - - if (_PyObject_GC_IS_TRACKED(f)) + if (_PyObject_GC_IS_TRACKED(f)) { _PyObject_GC_UNTRACK(f); + } Py_TRASHCAN_SAFE_BEGIN(f) /* Kill all local variables */ - valuestack = f->f_valuestack; - for (p = f->f_localsplus; p < valuestack; p++) + PyObject **valuestack = f->f_valuestack; + for (PyObject **p = f->f_localsplus; p < valuestack; p++) { Py_CLEAR(*p); + } /* Free stack */ if (f->f_stacktop != NULL) { - for (p = valuestack; p < f->f_stacktop; p++) + for (PyObject **p = valuestack; p < f->f_stacktop; p++) { Py_XDECREF(*p); + } } Py_XDECREF(f->f_back); @@ -599,19 +588,21 @@ frame_dealloc(PyFrameObject *f) Py_CLEAR(f->f_locals); Py_CLEAR(f->f_trace); - co = f->f_code; + PyCodeObject *co = f->f_code; if (co->co_zombieframe == NULL) { co->co_zombieframe = f; } -#if PyFrame_MAXFREELIST > 0 - else if (numfree < PyFrame_MAXFREELIST) { - ++numfree; - f->f_back = free_list; - free_list = f; - } -#endif else { - PyObject_GC_Del(f); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_frame_state *state = &interp->frame; + if (state->numfree < PyFrame_MAXFREELIST) { + ++state->numfree; + f->f_back = state->free_list; + state->free_list = f; + } + else { + PyObject_GC_Del(f); + } } Py_DECREF(co); @@ -789,21 +780,20 @@ frame_alloc(PyCodeObject *code) Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars); Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars); Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees; -#if PyFrame_MAXFREELIST > 0 - if (free_list == NULL) -#endif + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_frame_state *state = &interp->frame; + if (state->free_list == NULL) { f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras); if (f == NULL) { return NULL; } } -#if PyFrame_MAXFREELIST > 0 else { - assert(numfree > 0); - --numfree; - f = free_list; - free_list = free_list->f_back; + assert(state->numfree > 0); + --state->numfree; + f = state->free_list; + state->free_list = state->free_list->f_back; if (Py_SIZE(f) < extras) { PyFrameObject *new_f = PyObject_GC_Resize(PyFrameObject, f, extras); if (new_f == NULL) { @@ -814,7 +804,6 @@ frame_alloc(PyCodeObject *code) } _Py_NewReference((PyObject *)f); } -#endif f->f_code = code; extras = code->co_nlocals + ncells + nfrees; @@ -1183,34 +1172,33 @@ PyFrame_LocalsToFast(PyFrameObject *f, int clear) /* Clear out the free list */ void -_PyFrame_ClearFreeList(void) +_PyFrame_ClearFreeList(PyThreadState *tstate) { -#if PyFrame_MAXFREELIST > 0 - while (free_list != NULL) { - PyFrameObject *f = free_list; - free_list = free_list->f_back; + struct _Py_frame_state *state = &tstate->interp->frame; + while (state->free_list != NULL) { + PyFrameObject *f = state->free_list; + state->free_list = state->free_list->f_back; PyObject_GC_Del(f); - --numfree; + --state->numfree; } - assert(numfree == 0); -#endif + assert(state->numfree == 0); } void -_PyFrame_Fini(void) +_PyFrame_Fini(PyThreadState *tstate) { - _PyFrame_ClearFreeList(); + _PyFrame_ClearFreeList(tstate); } /* Print summary info about the state of the optimized allocator */ void _PyFrame_DebugMallocStats(FILE *out) { -#if PyFrame_MAXFREELIST > 
0 + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_frame_state *state = &interp->frame; _PyDebugAllocatorStats(out, "free PyFrameObject", - numfree, sizeof(PyFrameObject)); -#endif + state->numfree, sizeof(PyFrameObject)); } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index ee9d698d7d089..1dbdbfdf5a318 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1249,10 +1249,7 @@ flush_std_files(void) static void finalize_interp_types(PyThreadState *tstate, int is_main_interp) { - if (is_main_interp) { - /* Sundry finalizers */ - _PyFrame_Fini(); - } + _PyFrame_Fini(tstate); _PyTuple_Fini(tstate); if (is_main_interp) { _PyList_Fini(); From webhook-mailer at python.org Thu Jun 4 19:40:29 2020 From: webhook-mailer at python.org (Cheryl Sabella) Date: Thu, 04 Jun 2020 23:40:29 -0000 Subject: [Python-checkins] bpo-40807: Show warnings once from codeop._maybe_compile (#20486) Message-ID: https://github.com/python/cpython/commit/052d3fc0907be253cfd64b2c737a0b0aca586011 commit: 052d3fc0907be253cfd64b2c737a0b0aca586011 branch: master author: Cheryl Sabella committer: GitHub date: 2020-06-04T19:40:24-04:00 summary: bpo-40807: Show warnings once from codeop._maybe_compile (#20486) * bpo-40807: Show warnings once from codeop._maybe_compile * Move catch_warnings * news Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst M Lib/codeop.py M Lib/test/test_codeop.py diff --git a/Lib/codeop.py b/Lib/codeop.py index 835e68c09ba27..7e192ea6a10a0 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -57,6 +57,7 @@ """ import __future__ +import warnings _features = [getattr(__future__, fname) for fname in __future__.all_feature_names] @@ -83,15 +84,18 @@ def _maybe_compile(compiler, source, filename, symbol): except SyntaxError: pass - try: - code1 = compiler(source + "\n", filename, symbol) - except SyntaxError as e: - err1 = e - - try: - code2 = compiler(source + "\n\n", filename, symbol) - except SyntaxError as e: - err2 = e + # Suppress warnings after the first compile to avoid duplication. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + code1 = compiler(source + "\n", filename, symbol) + except SyntaxError as e: + err1 = e + + try: + code2 = compiler(source + "\n\n", filename, symbol) + except SyntaxError as e: + err2 = e try: if code: diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 0c5e362feea0c..45cb1a7b74e90 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -303,6 +303,11 @@ def test_filename(self): self.assertNotEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "def", 'single').co_filename) + def test_warning(self): + # Test that the warning is only returned once. + with support.check_warnings((".*literal", SyntaxWarning)) as w: + compile_command("0 is 0") + self.assertEqual(len(w.warnings), 1) if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst new file mode 100644 index 0000000000000..532b809b77eed --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst @@ -0,0 +1,2 @@ +Stop codeop._maybe_compile, used by code.InteractiveInterpreter (and IDLE). +from from emitting each warning three times. 
From webhook-mailer at python.org Thu Jun 4 20:05:50 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 05 Jun 2020 00:05:50 -0000 Subject: [Python-checkins] bpo-40521: Make list free list per-interpreter (GH-20642) Message-ID: https://github.com/python/cpython/commit/88ec9190105c9b03f49aaef601ce02b242a75273 commit: 88ec9190105c9b03f49aaef601ce02b242a75273 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T02:05:41+02:00 summary: bpo-40521: Make list free list per-interpreter (GH-20642) Each interpreter now has its own list free list: * Move list numfree and free_list into PyInterpreterState. * Add _Py_list_state structure. * Add tstate parameter to _PyList_ClearFreeList() and _PyList_Fini(). * Remove "#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS". * _PyGC_Fini() clears gcstate->garbage list which can be stored in the list free list. Call _PyGC_Fini() before _PyList_Fini() to prevent leaking this list. files: M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Objects/listobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 01265d3f985b9..3388b4d69e264 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -168,7 +168,7 @@ PyAPI_FUNC(void) _PyGC_InitState(struct _gc_runtime_state *); extern void _PyFrame_ClearFreeList(PyThreadState *tstate); extern void _PyTuple_ClearFreeList(PyThreadState *tstate); extern void _PyFloat_ClearFreeList(PyThreadState *tstate); -extern void _PyList_ClearFreeList(void); +extern void _PyList_ClearFreeList(PyThreadState *tstate); extern void _PyDict_ClearFreeList(void); extern void _PyAsyncGen_ClearFreeLists(void); extern void _PyContext_ClearFreeList(void); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 9b805f004eaa6..0eab246562051 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -84,6 +84,16 @@ struct _Py_tuple_state { #endif }; +/* Empty list reuse scheme to save calls to malloc and free */ +#ifndef PyList_MAXFREELIST +# define PyList_MAXFREELIST 80 +#endif + +struct _Py_list_state { + PyListObject *free_list[PyList_MAXFREELIST]; + int numfree; +}; + struct _Py_float_state { /* Special free list free_list is a singly-linked list of available PyFloatObjects, @@ -192,6 +202,7 @@ struct _is { PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS]; #endif struct _Py_tuple_state tuple; + struct _Py_list_state list; struct _Py_float_state float_state; struct _Py_frame_state frame; diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 06d2ac167d619..3c35ca23eab1a 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -61,7 +61,7 @@ extern PyStatus _PyGC_Init(PyThreadState *tstate); extern void _PyFrame_Fini(PyThreadState *tstate); extern void _PyDict_Fini(void); extern void _PyTuple_Fini(PyThreadState *tstate); -extern void _PyList_Fini(void); +extern void _PyList_Fini(PyThreadState *tstate); extern void _PySet_Fini(void); extern void _PyBytes_Fini(void); extern void _PyFloat_Fini(PyThreadState *tstate); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 71a1064ba7d14..54cc60036164e 100644 --- 
a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,3 +1,3 @@ -The tuple free lists, the empty tuple singleton, the float free list, the slice -cache, and the frame free list are no longer shared by all interpreters: each -interpreter now its has own free lists and caches. +The tuple free lists, the empty tuple singleton, the list free list, the float +free list, the slice cache, and the frame free list are no longer shared by all +interpreters: each interpreter now its has own free lists and caches. diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 45dc89d08c1fb..2f062d0022589 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1029,7 +1029,7 @@ clear_freelists(void) _PyFrame_ClearFreeList(tstate); _PyTuple_ClearFreeList(tstate); _PyFloat_ClearFreeList(tstate); - _PyList_ClearFreeList(); + _PyList_ClearFreeList(tstate); _PyDict_ClearFreeList(); _PyAsyncGen_ClearFreeLists(); _PyContext_ClearFreeList(); diff --git a/Objects/listobject.c b/Objects/listobject.c index 30d2620753744..043256d8adbf5 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -96,65 +96,59 @@ list_preallocate_exact(PyListObject *self, Py_ssize_t size) return 0; } -/* Empty list reuse scheme to save calls to malloc and free */ -#ifndef PyList_MAXFREELIST -# define PyList_MAXFREELIST 80 -#endif - -/* bpo-40521: list free lists are shared by all interpreters. */ -#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS -# undef PyList_MAXFREELIST -# define PyList_MAXFREELIST 0 -#endif - -static PyListObject *free_list[PyList_MAXFREELIST]; -static int numfree = 0; - void -_PyList_ClearFreeList(void) +_PyList_ClearFreeList(PyThreadState *tstate) { - while (numfree) { - PyListObject *op = free_list[--numfree]; + struct _Py_list_state *state = &tstate->interp->list; + while (state->numfree) { + PyListObject *op = state->free_list[--state->numfree]; assert(PyList_CheckExact(op)); PyObject_GC_Del(op); } } void -_PyList_Fini(void) +_PyList_Fini(PyThreadState *tstate) { - _PyList_ClearFreeList(); + _PyList_ClearFreeList(tstate); } /* Print summary info about the state of the optimized allocator */ void _PyList_DebugMallocStats(FILE *out) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_list_state *state = &interp->list; _PyDebugAllocatorStats(out, "free PyListObject", - numfree, sizeof(PyListObject)); + state->numfree, sizeof(PyListObject)); } PyObject * PyList_New(Py_ssize_t size) { - PyListObject *op; - if (size < 0) { PyErr_BadInternalCall(); return NULL; } - if (numfree) { - numfree--; - op = free_list[numfree]; + + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_list_state *state = &interp->list; + PyListObject *op; + if (state->numfree) { + state->numfree--; + op = state->free_list[state->numfree]; _Py_NewReference((PyObject *)op); - } else { + } + else { op = PyObject_GC_New(PyListObject, &PyList_Type); - if (op == NULL) + if (op == NULL) { return NULL; + } } - if (size <= 0) + if (size <= 0) { op->ob_item = NULL; + } else { op->ob_item = (PyObject **) PyMem_Calloc(size, sizeof(PyObject *)); if (op->ob_item == NULL) { @@ -334,10 +328,14 @@ list_dealloc(PyListObject *op) } PyMem_FREE(op->ob_item); } - if (numfree < PyList_MAXFREELIST && PyList_CheckExact(op)) - free_list[numfree++] = op; - else + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_list_state *state = &interp->list; + if (state->numfree < PyList_MAXFREELIST && 
PyList_CheckExact(op)) { + state->free_list[state->numfree++] = op; + } + else { Py_TYPE(op)->tp_free((PyObject *)op); + } Py_TRASHCAN_END } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 1dbdbfdf5a318..09d4d88404144 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1251,8 +1251,8 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) { _PyFrame_Fini(tstate); _PyTuple_Fini(tstate); + _PyList_Fini(tstate); if (is_main_interp) { - _PyList_Fini(); _PySet_Fini(); _PyBytes_Fini(); } @@ -1296,6 +1296,8 @@ finalize_interp_clear(PyThreadState *tstate) _PyGC_CollectNoFail(); } + _PyGC_Fini(tstate); + finalize_interp_types(tstate, is_main_interp); if (is_main_interp) { @@ -1309,8 +1311,6 @@ finalize_interp_clear(PyThreadState *tstate) _PyExc_Fini(); } - - _PyGC_Fini(tstate); } From webhook-mailer at python.org Thu Jun 4 20:34:22 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 05 Jun 2020 00:34:22 -0000 Subject: [Python-checkins] bpo-40521: Make async gen free lists per-interpreter (GH-20643) Message-ID: https://github.com/python/cpython/commit/78a02c2568714562e23e885b6dc5730601f35226 commit: 78a02c2568714562e23e885b6dc5730601f35226 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T02:34:14+02:00 summary: bpo-40521: Make async gen free lists per-interpreter (GH-20643) Each interpreter now has its own asynchronous generator free lists: * Move async gen free lists into PyInterpreterState. * Move _PyAsyncGen_MAXFREELIST define to pycore_interp.h * Add _Py_async_gen_state structure. * Add tstate parameter to _PyAsyncGen_ClearFreeLists and _PyAsyncGen_Fini(). files: M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Objects/genobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index 3388b4d69e264..ad2e552df55f9 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -170,7 +170,7 @@ extern void _PyTuple_ClearFreeList(PyThreadState *tstate); extern void _PyFloat_ClearFreeList(PyThreadState *tstate); extern void _PyList_ClearFreeList(PyThreadState *tstate); extern void _PyDict_ClearFreeList(void); -extern void _PyAsyncGen_ClearFreeLists(void); +extern void _PyAsyncGen_ClearFreeLists(PyThreadState *tstate); extern void _PyContext_ClearFreeList(void); #ifdef __cplusplus diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 0eab246562051..d624218201b91 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -108,6 +108,23 @@ struct _Py_frame_state { int numfree; }; +#ifndef _PyAsyncGen_MAXFREELIST +# define _PyAsyncGen_MAXFREELIST 80 +#endif + +struct _Py_async_gen_state { + /* Freelists boost performance 6-10%; they also reduce memory + fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend + are short-living objects that are instantiated for every + __anext__() call. 
*/ + struct _PyAsyncGenWrappedValue* value_freelist[_PyAsyncGen_MAXFREELIST]; + int value_numfree; + + struct PyAsyncGenASend* asend_freelist[_PyAsyncGen_MAXFREELIST]; + int asend_numfree; +}; + + /* interpreter state */ @@ -205,6 +222,7 @@ struct _is { struct _Py_list_state list; struct _Py_float_state float_state; struct _Py_frame_state frame; + struct _Py_async_gen_state async_gen; /* Using a cache is very effective since typically only a single slice is created and then deleted again. */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 3c35ca23eab1a..3e3657339a4a4 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -66,7 +66,7 @@ extern void _PySet_Fini(void); extern void _PyBytes_Fini(void); extern void _PyFloat_Fini(PyThreadState *tstate); extern void _PySlice_Fini(PyThreadState *tstate); -extern void _PyAsyncGen_Fini(void); +extern void _PyAsyncGen_Fini(PyThreadState *tstate); extern void PyOS_FiniInterrupts(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 54cc60036164e..f0fd5a1e13b79 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,3 +1,4 @@ The tuple free lists, the empty tuple singleton, the list free list, the float -free list, the slice cache, and the frame free list are no longer shared by all -interpreters: each interpreter now its has own free lists and caches. +free list, the slice cache, the frame free list, the asynchronous generator +free lists are no longer shared by all interpreters: each interpreter now its +has own free lists and caches. diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 2f062d0022589..89e2db7b19495 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1031,7 +1031,7 @@ clear_freelists(void) _PyFloat_ClearFreeList(tstate); _PyList_ClearFreeList(tstate); _PyDict_ClearFreeList(); - _PyAsyncGen_ClearFreeLists(); + _PyAsyncGen_ClearFreeLists(tstate); _PyContext_ClearFreeList(); } diff --git a/Objects/genobject.c b/Objects/genobject.c index 1393f42533a59..f7dbfd7486419 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -1162,7 +1162,7 @@ typedef enum { } AwaitableState; -typedef struct { +typedef struct PyAsyncGenASend { PyObject_HEAD PyAsyncGenObject *ags_gen; @@ -1174,7 +1174,7 @@ typedef struct { } PyAsyncGenASend; -typedef struct { +typedef struct PyAsyncGenAThrow { PyObject_HEAD PyAsyncGenObject *agt_gen; @@ -1186,28 +1186,12 @@ typedef struct { } PyAsyncGenAThrow; -typedef struct { +typedef struct _PyAsyncGenWrappedValue { PyObject_HEAD PyObject *agw_val; } _PyAsyncGenWrappedValue; -#ifndef _PyAsyncGen_MAXFREELIST -#define _PyAsyncGen_MAXFREELIST 80 -#endif - -/* Freelists boost performance 6-10%; they also reduce memory - fragmentation, as _PyAsyncGenWrappedValue and PyAsyncGenASend - are short-living objects that are instantiated for every - __anext__ call. 
-*/ - -static _PyAsyncGenWrappedValue *ag_value_freelist[_PyAsyncGen_MAXFREELIST]; -static int ag_value_freelist_free = 0; - -static PyAsyncGenASend *ag_asend_freelist[_PyAsyncGen_MAXFREELIST]; -static int ag_asend_freelist_free = 0; - #define _PyAsyncGenWrappedValue_CheckExact(o) \ Py_IS_TYPE(o, &_PyAsyncGenWrappedValue_Type) @@ -1423,27 +1407,29 @@ PyAsyncGen_New(PyFrameObject *f, PyObject *name, PyObject *qualname) void -_PyAsyncGen_ClearFreeLists(void) +_PyAsyncGen_ClearFreeLists(PyThreadState *tstate) { - while (ag_value_freelist_free) { + struct _Py_async_gen_state *state = &tstate->interp->async_gen; + + while (state->value_numfree) { _PyAsyncGenWrappedValue *o; - o = ag_value_freelist[--ag_value_freelist_free]; + o = state->value_freelist[--state->value_numfree]; assert(_PyAsyncGenWrappedValue_CheckExact(o)); PyObject_GC_Del(o); } - while (ag_asend_freelist_free) { + while (state->asend_numfree) { PyAsyncGenASend *o; - o = ag_asend_freelist[--ag_asend_freelist_free]; + o = state->asend_freelist[--state->asend_numfree]; assert(Py_IS_TYPE(o, &_PyAsyncGenASend_Type)); PyObject_GC_Del(o); } } void -_PyAsyncGen_Fini(void) +_PyAsyncGen_Fini(PyThreadState *tstate) { - _PyAsyncGen_ClearFreeLists(); + _PyAsyncGen_ClearFreeLists(tstate); } @@ -1486,10 +1472,13 @@ async_gen_asend_dealloc(PyAsyncGenASend *o) _PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->ags_gen); Py_CLEAR(o->ags_sendval); - if (ag_asend_freelist_free < _PyAsyncGen_MAXFREELIST) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_async_gen_state *state = &interp->async_gen; + if (state->asend_numfree < _PyAsyncGen_MAXFREELIST) { assert(PyAsyncGenASend_CheckExact(o)); - ag_asend_freelist[ag_asend_freelist_free++] = o; - } else { + state->asend_freelist[state->asend_numfree++] = o; + } + else { PyObject_GC_Del(o); } } @@ -1641,11 +1630,14 @@ static PyObject * async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) { PyAsyncGenASend *o; - if (ag_asend_freelist_free) { - ag_asend_freelist_free--; - o = ag_asend_freelist[ag_asend_freelist_free]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_async_gen_state *state = &interp->async_gen; + if (state->asend_numfree) { + state->asend_numfree--; + o = state->asend_freelist[state->asend_numfree]; _Py_NewReference((PyObject *)o); - } else { + } + else { o = PyObject_GC_New(PyAsyncGenASend, &_PyAsyncGenASend_Type); if (o == NULL) { return NULL; @@ -1673,10 +1665,13 @@ async_gen_wrapped_val_dealloc(_PyAsyncGenWrappedValue *o) { _PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->agw_val); - if (ag_value_freelist_free < _PyAsyncGen_MAXFREELIST) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_async_gen_state *state = &interp->async_gen; + if (state->value_numfree < _PyAsyncGen_MAXFREELIST) { assert(_PyAsyncGenWrappedValue_CheckExact(o)); - ag_value_freelist[ag_value_freelist_free++] = o; - } else { + state->value_freelist[state->value_numfree++] = o; + } + else { PyObject_GC_Del(o); } } @@ -1740,12 +1735,15 @@ _PyAsyncGenValueWrapperNew(PyObject *val) _PyAsyncGenWrappedValue *o; assert(val); - if (ag_value_freelist_free) { - ag_value_freelist_free--; - o = ag_value_freelist[ag_value_freelist_free]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_async_gen_state *state = &interp->async_gen; + if (state->value_numfree) { + state->value_numfree--; + o = state->value_freelist[state->value_numfree]; assert(_PyAsyncGenWrappedValue_CheckExact(o)); _Py_NewReference((PyObject*)o); - } else { + } + else { o 
= PyObject_GC_New(_PyAsyncGenWrappedValue, &_PyAsyncGenWrappedValue_Type); if (o == NULL) { diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 09d4d88404144..073973e1328d4 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1270,7 +1270,11 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) if (is_main_interp) { _Py_HashRandomization_Fini(); _PyArg_Fini(); - _PyAsyncGen_Fini(); + } + + _PyAsyncGen_Fini(tstate); + + if (is_main_interp) { _PyContext_Fini(); } From webhook-mailer at python.org Thu Jun 4 20:56:44 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 05 Jun 2020 00:56:44 -0000 Subject: [Python-checkins] bpo-40521: Make context free list per-interpreter (GH-20644) Message-ID: https://github.com/python/cpython/commit/e005ead49b1ee2b1507ceea94e6f89c28ecf1f81 commit: e005ead49b1ee2b1507ceea94e6f89c28ecf1f81 branch: master author: Victor Stinner committer: GitHub date: 2020-06-05T02:56:37+02:00 summary: bpo-40521: Make context free list per-interpreter (GH-20644) Each interpreter now has its own context free list: * Move context free list into PyInterpreterState. * Add _Py_context_state structure. * Add tstate parameter to _PyContext_ClearFreeList() and _PyContext_Fini(). * Pass tstate to clear_freelists(). files: M Include/internal/pycore_context.h M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Python/context.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_context.h b/Include/internal/pycore_context.h index f665ad5c115b0..ea4b3c8ea738f 100644 --- a/Include/internal/pycore_context.h +++ b/Include/internal/pycore_context.h @@ -37,6 +37,6 @@ struct _pycontexttokenobject { int _PyContext_Init(void); -void _PyContext_Fini(void); +void _PyContext_Fini(PyThreadState *tstate); #endif /* !Py_INTERNAL_CONTEXT_H */ diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index ad2e552df55f9..fd3fb7f94cab0 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -171,7 +171,7 @@ extern void _PyFloat_ClearFreeList(PyThreadState *tstate); extern void _PyList_ClearFreeList(PyThreadState *tstate); extern void _PyDict_ClearFreeList(void); extern void _PyAsyncGen_ClearFreeLists(PyThreadState *tstate); -extern void _PyContext_ClearFreeList(void); +extern void _PyContext_ClearFreeList(PyThreadState *tstate); #ifdef __cplusplus } diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index d624218201b91..4f811023f7a04 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -124,6 +124,12 @@ struct _Py_async_gen_state { int asend_numfree; }; +struct _Py_context_state { + // List of free PyContext objects + PyContext *freelist; + int numfree; +}; + /* interpreter state */ @@ -223,6 +229,7 @@ struct _is { struct _Py_float_state float_state; struct _Py_frame_state frame; struct _Py_async_gen_state async_gen; + struct _Py_context_state context; /* Using a cache is very effective since typically only a single slice is created and then deleted again. 
*/ diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index f0fd5a1e13b79..39cb80447f6a9 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,4 +1,4 @@ The tuple free lists, the empty tuple singleton, the list free list, the float free list, the slice cache, the frame free list, the asynchronous generator -free lists are no longer shared by all interpreters: each interpreter now its -has own free lists and caches. +free lists, and the context free list are no longer shared by all interpreters: +each interpreter now its has own free lists and caches. diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 89e2db7b19495..f68258d7a327c 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1023,16 +1023,15 @@ delete_garbage(PyThreadState *tstate, GCState *gcstate, * Clearing the free lists may give back memory to the OS earlier. */ static void -clear_freelists(void) +clear_freelists(PyThreadState *tstate) { - PyThreadState *tstate = _PyThreadState_GET(); _PyFrame_ClearFreeList(tstate); _PyTuple_ClearFreeList(tstate); _PyFloat_ClearFreeList(tstate); _PyList_ClearFreeList(tstate); _PyDict_ClearFreeList(); _PyAsyncGen_ClearFreeLists(tstate); - _PyContext_ClearFreeList(); + _PyContext_ClearFreeList(tstate); } // Show stats for objects in each generations @@ -1306,7 +1305,7 @@ collect(PyThreadState *tstate, int generation, /* Clear free list only during the collection of the highest * generation */ if (generation == NUM_GENERATIONS-1) { - clear_freelists(); + clear_freelists(tstate); } if (_PyErr_Occurred(tstate)) { diff --git a/Python/context.c b/Python/context.c index bacc7010c458e..3cf8db4c90cdf 100644 --- a/Python/context.c +++ b/Python/context.c @@ -10,8 +10,6 @@ #define CONTEXT_FREELIST_MAXLEN 255 -static PyContext *ctx_freelist = NULL; -static int ctx_freelist_len = 0; #include "clinic/context.c.h" @@ -334,11 +332,13 @@ class _contextvars.Context "PyContext *" "&PyContext_Type" static inline PyContext * _context_alloc(void) { + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_context_state *state = &interp->context; PyContext *ctx; - if (ctx_freelist_len) { - ctx_freelist_len--; - ctx = ctx_freelist; - ctx_freelist = (PyContext *)ctx->ctx_weakreflist; + if (state->numfree) { + state->numfree--; + ctx = state->freelist; + state->freelist = (PyContext *)ctx->ctx_weakreflist; ctx->ctx_weakreflist = NULL; _Py_NewReference((PyObject *)ctx); } @@ -458,10 +458,12 @@ context_tp_dealloc(PyContext *self) } (void)context_tp_clear(self); - if (ctx_freelist_len < CONTEXT_FREELIST_MAXLEN) { - ctx_freelist_len++; - self->ctx_weakreflist = (PyObject *)ctx_freelist; - ctx_freelist = self; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_context_state *state = &interp->context; + if (state->numfree < CONTEXT_FREELIST_MAXLEN) { + state->numfree++; + self->ctx_weakreflist = (PyObject *)state->freelist; + state->freelist = self; } else { Py_TYPE(self)->tp_free(self); @@ -1271,11 +1273,12 @@ get_token_missing(void) void -_PyContext_ClearFreeList(void) +_PyContext_ClearFreeList(PyThreadState *tstate) { - for (; ctx_freelist_len; ctx_freelist_len--) { - PyContext *ctx = ctx_freelist; - ctx_freelist = (PyContext *)ctx->ctx_weakreflist; + struct _Py_context_state *state = &tstate->interp->context; + for (; state->numfree; 
state->numfree--) { + PyContext *ctx = state->freelist; + state->freelist = (PyContext *)ctx->ctx_weakreflist; ctx->ctx_weakreflist = NULL; PyObject_GC_Del(ctx); } @@ -1283,10 +1286,10 @@ _PyContext_ClearFreeList(void) void -_PyContext_Fini(void) +_PyContext_Fini(PyThreadState *tstate) { Py_CLEAR(_token_missing); - _PyContext_ClearFreeList(); + _PyContext_ClearFreeList(tstate); _PyHamt_Fini(); } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 073973e1328d4..6d2eb1defc884 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1273,10 +1273,7 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) } _PyAsyncGen_Fini(tstate); - - if (is_main_interp) { - _PyContext_Fini(); - } + _PyContext_Fini(tstate); /* Cleanup Unicode implementation */ _PyUnicode_Fini(tstate); From webhook-mailer at python.org Thu Jun 4 22:42:51 2020 From: webhook-mailer at python.org (Terry Jan Reedy) Date: Fri, 05 Jun 2020 02:42:51 -0000 Subject: [Python-checkins] [3.8] bpo-40807: Backport test_codeop change [GH-19670] Message-ID: https://github.com/python/cpython/commit/a5d6aba318ead9cc756ba750a70da41f5def3f8f commit: a5d6aba318ead9cc756ba750a70da41f5def3f8f branch: 3.8 author: Terry Jan Reedy committer: GitHub date: 2020-06-04T22:42:44-04:00 summary: [3.8] bpo-40807: Backport test_codeop change [GH-19670] A tiny sliver of a 3.9 PEG parser patch needed to backport the test added by #20486. files: M Lib/test/test_codeop.py diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 98da26fa5dab1..4d52d15fa0fb3 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -3,12 +3,12 @@ Nick Mathewson """ import unittest -from test.support import is_jython +from test import support from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT import io -if is_jython: +if support.is_jython: import sys def unify_callables(d): @@ -21,7 +21,7 @@ class CodeopTests(unittest.TestCase): def assertValid(self, str, symbol='single'): '''succeed iff str is a valid piece of code''' - if is_jython: + if support.is_jython: code = compile_command(str, "", symbol) self.assertTrue(code) if symbol == "single": @@ -60,7 +60,7 @@ def test_valid(self): av = self.assertValid # special case - if not is_jython: + if not support.is_jython: self.assertEqual(compile_command(""), compile("pass", "", 'single', PyCF_DONT_IMPLY_DEDENT)) From webhook-mailer at python.org Thu Jun 4 23:00:59 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 05 Jun 2020 03:00:59 -0000 Subject: [Python-checkins] [3.8] bpo-40807: Backport test_codeop change [GH-19670] Message-ID: https://github.com/python/cpython/commit/12d3061c7819a73d891dcce44327410eaf0e1bc2 commit: 12d3061c7819a73d891dcce44327410eaf0e1bc2 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-04T23:00:47-04:00 summary: [3.8] bpo-40807: Backport test_codeop change [GH-19670] A tiny sliver of a 3.9 PEG parser patch needed to backport the test added by GH-20486. 
(cherry picked from commit a5d6aba318ead9cc756ba750a70da41f5def3f8f) Co-authored-by: Terry Jan Reedy files: M Lib/test/test_codeop.py diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 98da26fa5dab1..4d52d15fa0fb3 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -3,12 +3,12 @@ Nick Mathewson """ import unittest -from test.support import is_jython +from test import support from codeop import compile_command, PyCF_DONT_IMPLY_DEDENT import io -if is_jython: +if support.is_jython: import sys def unify_callables(d): @@ -21,7 +21,7 @@ class CodeopTests(unittest.TestCase): def assertValid(self, str, symbol='single'): '''succeed iff str is a valid piece of code''' - if is_jython: + if support.is_jython: code = compile_command(str, "", symbol) self.assertTrue(code) if symbol == "single": @@ -60,7 +60,7 @@ def test_valid(self): av = self.assertValid # special case - if not is_jython: + if not support.is_jython: self.assertEqual(compile_command(""), compile("pass", "", 'single', PyCF_DONT_IMPLY_DEDENT)) From webhook-mailer at python.org Fri Jun 5 11:01:10 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Fri, 05 Jun 2020 15:01:10 -0000 Subject: [Python-checkins] bpo-1635741: Port mmap module to multiphase initialization (GH-19459) Message-ID: https://github.com/python/cpython/commit/3ad52e366fea37b02a3f619e6b7cffa7dfbdfa2e commit: 3ad52e366fea37b02a3f619e6b7cffa7dfbdfa2e branch: master author: Dong-hee Na committer: GitHub date: 2020-06-06T00:01:02+09:00 summary: bpo-1635741: Port mmap module to multiphase initialization (GH-19459) files: A Misc/NEWS.d/next/Core and Builtins/2020-04-10-23-54-57.bpo-1635741.ZURqoN.rst M Modules/mmapmodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-10-23-54-57.bpo-1635741.ZURqoN.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-10-23-54-57.bpo-1635741.ZURqoN.rst new file mode 100644 index 0000000000000..cb849fb9b4430 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-04-10-23-54-57.bpo-1635741.ZURqoN.rst @@ -0,0 +1 @@ +Port :mod:`mmap` to multiphase initialization. 
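For readers who have not followed PEP 489: with multiphase initialization the extension no longer builds and populates the module object inside PyInit_mmap(). Instead it returns a PyModuleDef whose m_slots tell the import machinery how to execute the module, which among other things makes per-interpreter module instances possible. A minimal sketch of the shape the mmapmodule.c diff below ends up with (constant registration trimmed to a single example; unlike the old setint() helper, failures are now propagated by returning -1):

    static int
    mmap_exec(PyObject *module)
    {
        if (PyType_Ready(&mmap_object_type) < 0) {
            return -1;
        }
        if (PyModule_AddType(module, &mmap_object_type) < 0) {
            return -1;
        }
        /* One representative constant; the real code adds many more. */
        if (PyModule_AddIntConstant(module, "ACCESS_READ", ACCESS_READ) < 0) {
            return -1;
        }
        return 0;
    }

    static PyModuleDef_Slot mmap_slots[] = {
        {Py_mod_exec, mmap_exec},   /* runs after the module object exists */
        {0, NULL}
    };

    static struct PyModuleDef mmapmodule = {
        PyModuleDef_HEAD_INIT,
        .m_name = "mmap",
        .m_size = 0,
        .m_slots = mmap_slots,
    };

    PyMODINIT_FUNC
    PyInit_mmap(void)
    {
        /* Multiphase init: hand back the def and let the import system
           create and execute the module. */
        return PyModuleDef_Init(&mmapmodule);
    }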
diff --git a/Modules/mmapmodule.c b/Modules/mmapmodule.c index 8a60db1e1c469..463bd40e78f4f 100644 --- a/Modules/mmapmodule.c +++ b/Modules/mmapmodule.c @@ -1509,157 +1509,163 @@ new_mmap_object(PyTypeObject *type, PyObject *args, PyObject *kwdict) } #endif /* MS_WINDOWS */ -static void -setint(PyObject *d, const char *name, long value) +static int +mmap_exec(PyObject *module) { - PyObject *o = PyLong_FromLong(value); - if (o) { - PyDict_SetItemString(d, name, o); - Py_DECREF(o); + if (PyType_Ready(&mmap_object_type) < 0) { + return -1; } -} - -static struct PyModuleDef mmapmodule = { - PyModuleDef_HEAD_INIT, - "mmap", - NULL, - -1, - NULL, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC -PyInit_mmap(void) -{ - PyObject *dict, *module; + Py_INCREF(PyExc_OSError); + if (PyModule_AddObject(module, "error", PyExc_OSError) < 0) { + Py_DECREF(PyExc_OSError); + return -1; + } + if (PyModule_AddType(module, &mmap_object_type) < 0) { + return -1; + } - if (PyType_Ready(&mmap_object_type) < 0) - return NULL; +#define ADD_INT_MACRO(module, constant) \ + do { \ + if (PyModule_AddIntConstant(module, #constant, constant) < 0) { \ + return -1; \ + } \ + } while (0) - module = PyModule_Create(&mmapmodule); - if (module == NULL) - return NULL; - dict = PyModule_GetDict(module); - if (!dict) - return NULL; - PyDict_SetItemString(dict, "error", PyExc_OSError); - PyDict_SetItemString(dict, "mmap", (PyObject*) &mmap_object_type); #ifdef PROT_EXEC - setint(dict, "PROT_EXEC", PROT_EXEC); + ADD_INT_MACRO(module, PROT_EXEC); #endif #ifdef PROT_READ - setint(dict, "PROT_READ", PROT_READ); + ADD_INT_MACRO(module, PROT_READ); #endif #ifdef PROT_WRITE - setint(dict, "PROT_WRITE", PROT_WRITE); + ADD_INT_MACRO(module, PROT_WRITE); #endif #ifdef MAP_SHARED - setint(dict, "MAP_SHARED", MAP_SHARED); + ADD_INT_MACRO(module, MAP_SHARED); #endif #ifdef MAP_PRIVATE - setint(dict, "MAP_PRIVATE", MAP_PRIVATE); + ADD_INT_MACRO(module, MAP_PRIVATE); #endif #ifdef MAP_DENYWRITE - setint(dict, "MAP_DENYWRITE", MAP_DENYWRITE); + ADD_INT_MACRO(module, MAP_DENYWRITE); #endif #ifdef MAP_EXECUTABLE - setint(dict, "MAP_EXECUTABLE", MAP_EXECUTABLE); + ADD_INT_MACRO(module, MAP_EXECUTABLE); #endif #ifdef MAP_ANONYMOUS - setint(dict, "MAP_ANON", MAP_ANONYMOUS); - setint(dict, "MAP_ANONYMOUS", MAP_ANONYMOUS); + if (PyModule_AddIntConstant(module, "MAP_ANON", MAP_ANONYMOUS) < 0 ) { + return -1; + } + ADD_INT_MACRO(module, MAP_ANONYMOUS); #endif #ifdef MAP_POPULATE - setint(dict, "MAP_POPULATE", MAP_POPULATE); + ADD_INT_MACRO(module, MAP_POPULATE); #endif + if (PyModule_AddIntConstant(module, "PAGESIZE", (long)my_getpagesize()) < 0 ) { + return -1; + } - setint(dict, "PAGESIZE", (long)my_getpagesize()); - - setint(dict, "ALLOCATIONGRANULARITY", (long)my_getallocationgranularity()); + if (PyModule_AddIntConstant(module, "ALLOCATIONGRANULARITY", (long)my_getallocationgranularity()) < 0 ) { + return -1; + } - setint(dict, "ACCESS_DEFAULT", ACCESS_DEFAULT); - setint(dict, "ACCESS_READ", ACCESS_READ); - setint(dict, "ACCESS_WRITE", ACCESS_WRITE); - setint(dict, "ACCESS_COPY", ACCESS_COPY); + ADD_INT_MACRO(module, ACCESS_DEFAULT); + ADD_INT_MACRO(module, ACCESS_READ); + ADD_INT_MACRO(module, ACCESS_WRITE); + ADD_INT_MACRO(module, ACCESS_COPY); #ifdef HAVE_MADVISE // Conventional advice values #ifdef MADV_NORMAL - setint(dict, "MADV_NORMAL", MADV_NORMAL); + ADD_INT_MACRO(module, MADV_NORMAL); #endif #ifdef MADV_RANDOM - setint(dict, "MADV_RANDOM", MADV_RANDOM); + ADD_INT_MACRO(module, MADV_RANDOM); #endif #ifdef MADV_SEQUENTIAL - setint(dict, 
"MADV_SEQUENTIAL", MADV_SEQUENTIAL); + ADD_INT_MACRO(module, MADV_SEQUENTIAL); #endif #ifdef MADV_WILLNEED - setint(dict, "MADV_WILLNEED", MADV_WILLNEED); + ADD_INT_MACRO(module, MADV_WILLNEED); #endif #ifdef MADV_DONTNEED - setint(dict, "MADV_DONTNEED", MADV_DONTNEED); + ADD_INT_MACRO(module, MADV_DONTNEED); #endif // Linux-specific advice values #ifdef MADV_REMOVE - setint(dict, "MADV_REMOVE", MADV_REMOVE); + ADD_INT_MACRO(module, MADV_REMOVE); #endif #ifdef MADV_DONTFORK - setint(dict, "MADV_DONTFORK", MADV_DONTFORK); + ADD_INT_MACRO(module, MADV_DONTFORK); #endif #ifdef MADV_DOFORK - setint(dict, "MADV_DOFORK", MADV_DOFORK); + ADD_INT_MACRO(module, MADV_DOFORK); #endif #ifdef MADV_HWPOISON - setint(dict, "MADV_HWPOISON", MADV_HWPOISON); + ADD_INT_MACRO(module, MADV_HWPOISON); #endif #ifdef MADV_MERGEABLE - setint(dict, "MADV_MERGEABLE", MADV_MERGEABLE); + ADD_INT_MACRO(module, MADV_MERGEABLE); #endif #ifdef MADV_UNMERGEABLE - setint(dict, "MADV_UNMERGEABLE", MADV_UNMERGEABLE); + ADD_INT_MACRO(module, MADV_UNMERGEABLE); #endif #ifdef MADV_SOFT_OFFLINE - setint(dict, "MADV_SOFT_OFFLINE", MADV_SOFT_OFFLINE); + ADD_INT_MACRO(module, MADV_SOFT_OFFLINE); #endif #ifdef MADV_HUGEPAGE - setint(dict, "MADV_HUGEPAGE", MADV_HUGEPAGE); + ADD_INT_MACRO(module, MADV_HUGEPAGE); #endif #ifdef MADV_NOHUGEPAGE - setint(dict, "MADV_NOHUGEPAGE", MADV_NOHUGEPAGE); + ADD_INT_MACRO(module, MADV_NOHUGEPAGE); #endif #ifdef MADV_DONTDUMP - setint(dict, "MADV_DONTDUMP", MADV_DONTDUMP); + ADD_INT_MACRO(module, MADV_DONTDUMP); #endif #ifdef MADV_DODUMP - setint(dict, "MADV_DODUMP", MADV_DODUMP); + ADD_INT_MACRO(module, MADV_DODUMP); #endif #ifdef MADV_FREE // (Also present on FreeBSD and macOS.) - setint(dict, "MADV_FREE", MADV_FREE); + ADD_INT_MACRO(module, MADV_FREE); #endif // FreeBSD-specific #ifdef MADV_NOSYNC - setint(dict, "MADV_NOSYNC", MADV_NOSYNC); + ADD_INT_MACRO(module, MADV_NOSYNC); #endif #ifdef MADV_AUTOSYNC - setint(dict, "MADV_AUTOSYNC", MADV_AUTOSYNC); + ADD_INT_MACRO(module, MADV_AUTOSYNC); #endif #ifdef MADV_NOCORE - setint(dict, "MADV_NOCORE", MADV_NOCORE); + ADD_INT_MACRO(module, MADV_NOCORE); #endif #ifdef MADV_CORE - setint(dict, "MADV_CORE", MADV_CORE); + ADD_INT_MACRO(module, MADV_CORE); #endif #ifdef MADV_PROTECT - setint(dict, "MADV_PROTECT", MADV_PROTECT); + ADD_INT_MACRO(module, MADV_PROTECT); #endif #endif // HAVE_MADVISE + return 0; +} - return module; +static PyModuleDef_Slot mmap_slots[] = { + {Py_mod_exec, mmap_exec}, + {0, NULL} +}; + +static struct PyModuleDef mmapmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "mmap", + .m_size = 0, + .m_slots = mmap_slots, +}; + +PyMODINIT_FUNC +PyInit_mmap(void) +{ + return PyModuleDef_Init(&mmapmodule); } From webhook-mailer at python.org Fri Jun 5 13:43:09 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Fri, 05 Jun 2020 17:43:09 -0000 Subject: [Python-checkins] bpo-40874: Update to libmpdec-2.5.0 (GH-20652) Message-ID: https://github.com/python/cpython/commit/087d612efebe7c64e5f079b07e0454111859830e commit: 087d612efebe7c64e5f079b07e0454111859830e branch: master author: Stefan Krah committer: GitHub date: 2020-06-05T19:43:01+02:00 summary: bpo-40874: Update to libmpdec-2.5.0 (GH-20652) files: M Modules/_decimal/libmpdec/basearith.c M Modules/_decimal/libmpdec/basearith.h M Modules/_decimal/libmpdec/bits.h M Modules/_decimal/libmpdec/constants.c M Modules/_decimal/libmpdec/constants.h M Modules/_decimal/libmpdec/context.c M Modules/_decimal/libmpdec/convolute.c M Modules/_decimal/libmpdec/convolute.h M 
Modules/_decimal/libmpdec/crt.c M Modules/_decimal/libmpdec/crt.h M Modules/_decimal/libmpdec/difradix2.c M Modules/_decimal/libmpdec/difradix2.h M Modules/_decimal/libmpdec/fnt.c M Modules/_decimal/libmpdec/fnt.h M Modules/_decimal/libmpdec/fourstep.c M Modules/_decimal/libmpdec/fourstep.h M Modules/_decimal/libmpdec/io.c M Modules/_decimal/libmpdec/io.h M Modules/_decimal/libmpdec/literature/fnt.py M Modules/_decimal/libmpdec/literature/matrix-transform.txt M Modules/_decimal/libmpdec/literature/mulmod-64.txt M Modules/_decimal/libmpdec/literature/mulmod-ppro.txt M Modules/_decimal/libmpdec/literature/six-step.txt M Modules/_decimal/libmpdec/literature/umodarith.lisp M Modules/_decimal/libmpdec/mpalloc.c M Modules/_decimal/libmpdec/mpalloc.h M Modules/_decimal/libmpdec/mpdecimal.c M Modules/_decimal/libmpdec/mpdecimal.h M Modules/_decimal/libmpdec/numbertheory.c M Modules/_decimal/libmpdec/numbertheory.h M Modules/_decimal/libmpdec/sixstep.c M Modules/_decimal/libmpdec/sixstep.h M Modules/_decimal/libmpdec/transpose.c M Modules/_decimal/libmpdec/transpose.h M Modules/_decimal/libmpdec/typearith.h M Modules/_decimal/libmpdec/umodarith.h M Modules/_decimal/libmpdec/vccompat.h M Modules/_decimal/libmpdec/vcdiv64.asm diff --git a/Modules/_decimal/libmpdec/basearith.c b/Modules/_decimal/libmpdec/basearith.c index dfe1523927a40..85c608fadf515 100644 --- a/Modules/_decimal/libmpdec/basearith.c +++ b/Modules/_decimal/libmpdec/basearith.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,13 +27,13 @@ #include "mpdecimal.h" -#include -#include -#include + #include +#include + +#include "basearith.h" #include "constants.h" #include "typearith.h" -#include "basearith.h" /*********************************************************************/ @@ -337,6 +337,7 @@ _mpd_basedivmod(mpd_uint_t *q, mpd_uint_t *r, /* D2: loop */ for (j=m; j != MPD_SIZE_MAX; j--) { + assert(2 <= j+n && j+n <= nplusm); /* annotation for scan-build */ /* D3: calculate qhat and rhat */ rhat = _mpd_shortdiv(w2, u+j+n-1, 2, v[n-1]); @@ -652,6 +653,3 @@ _mpd_shortdiv_b(mpd_uint_t *w, const mpd_uint_t *u, mpd_size_t n, return rem; } - - - diff --git a/Modules/_decimal/libmpdec/basearith.h b/Modules/_decimal/libmpdec/basearith.h index 976358a110ecf..d35925aaddb48 100644 --- a/Modules/_decimal/libmpdec/basearith.h +++ b/Modules/_decimal/libmpdec/basearith.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef BASEARITH_H -#define BASEARITH_H +#ifndef LIBMPDEC_BASEARITH_H_ +#define LIBMPDEC_BASEARITH_H_ #include "mpdecimal.h" -#include #include "typearith.h" @@ -216,7 +215,4 @@ _mpd_isallnine(const mpd_uint_t *data, mpd_ssize_t len) MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif /* BASEARITH_H */ - - - +#endif /* LIBMPDEC_BASEARITH_H_ */ diff --git a/Modules/_decimal/libmpdec/bits.h b/Modules/_decimal/libmpdec/bits.h index b5eaa24976ae5..aa9c3e77980c0 100644 --- a/Modules/_decimal/libmpdec/bits.h +++ b/Modules/_decimal/libmpdec/bits.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. 
+ * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef BITS_H -#define BITS_H +#ifndef LIBMPDEC_BITS_H_ +#define LIBMPDEC_BITS_H_ #include "mpdecimal.h" -#include /* Check if n is a power of 2. */ @@ -186,7 +185,4 @@ mpd_bsf(mpd_size_t a) #endif /* BSR/BSF */ -#endif /* BITS_H */ - - - +#endif /* LIBMPDEC_BITS_H_ */ diff --git a/Modules/_decimal/libmpdec/constants.c b/Modules/_decimal/libmpdec/constants.c index 2c2d5ea481035..4c4de622bc601 100644 --- a/Modules/_decimal/libmpdec/constants.c +++ b/Modules/_decimal/libmpdec/constants.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,7 +27,6 @@ #include "mpdecimal.h" -#include #include "constants.h" @@ -128,5 +127,3 @@ const char *mpd_clamp_string[MPD_CLAMP_GUARD] = { "CLAMP_DEFAULT", "CLAMP_IEEE_754" }; - - diff --git a/Modules/_decimal/libmpdec/constants.h b/Modules/_decimal/libmpdec/constants.h index c0febfc8772d7..7c1db839c20ba 100644 --- a/Modules/_decimal/libmpdec/constants.h +++ b/Modules/_decimal/libmpdec/constants.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,14 @@ */ -#ifndef CONSTANTS_H -#define CONSTANTS_H +#ifndef LIBMPDEC_CONSTANTS_H_ +#define LIBMPDEC_CONSTANTS_H_ #include "mpdecimal.h" +#include + /* Internal header file: all symbols have local scope in the DSO */ MPD_PRAGMA(MPD_HIDE_SYMBOLS_START) @@ -84,7 +86,4 @@ extern const mpd_uint_t UH_P1P2; MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif /* CONSTANTS_H */ - - - +#endif /* LIBMPDEC_CONSTANTS_H_ */ diff --git a/Modules/_decimal/libmpdec/context.c b/Modules/_decimal/libmpdec/context.c index 24c7b890c1d98..9cbc20509595d 100644 --- a/Modules/_decimal/libmpdec/context.c +++ b/Modules/_decimal/libmpdec/context.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,14 +27,16 @@ #include "mpdecimal.h" + +#include #include #include -#include void -mpd_dflt_traphandler(mpd_context_t *ctx UNUSED) +mpd_dflt_traphandler(mpd_context_t *ctx) { + (void)ctx; raise(SIGFPE); } @@ -282,5 +284,3 @@ mpd_addstatus_raise(mpd_context_t *ctx, uint32_t flags) mpd_traphandler(ctx); } } - - diff --git a/Modules/_decimal/libmpdec/convolute.c b/Modules/_decimal/libmpdec/convolute.c index 4c62e8bd3abd8..4bc8e8b5fd32f 100644 --- a/Modules/_decimal/libmpdec/convolute.c +++ b/Modules/_decimal/libmpdec/convolute.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,15 +27,14 @@ #include "mpdecimal.h" -#include #include "bits.h" #include "constants.h" +#include "convolute.h" #include "fnt.h" #include "fourstep.h" #include "numbertheory.h" #include "sixstep.h" #include "umodarith.h" -#include "convolute.h" /* Bignum: Fast convolution using the Number Theoretic Transform. Used for @@ -170,5 +169,3 @@ fnt_autoconvolute(mpd_uint_t *c1, mpd_size_t n, int modnum) return 1; } - - diff --git a/Modules/_decimal/libmpdec/convolute.h b/Modules/_decimal/libmpdec/convolute.h index f30a177a68406..62edb3e45739c 100644 --- a/Modules/_decimal/libmpdec/convolute.h +++ b/Modules/_decimal/libmpdec/convolute.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef CONVOLUTE_H -#define CONVOLUTE_H +#ifndef LIBMPDEC_CONVOLUTE_H_ +#define LIBMPDEC_CONVOLUTE_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -47,4 +46,4 @@ int fnt_autoconvolute(mpd_uint_t *c1, mpd_size_t n, int modnum); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_CONVOLUTE_H_ */ diff --git a/Modules/_decimal/libmpdec/crt.c b/Modules/_decimal/libmpdec/crt.c index 4a1e80a232284..613274ee0c5b5 100644 --- a/Modules/_decimal/libmpdec/crt.c +++ b/Modules/_decimal/libmpdec/crt.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,11 +27,14 @@ #include "mpdecimal.h" -#include + #include + +#include "constants.h" +#include "crt.h" #include "numbertheory.h" #include "umodarith.h" -#include "crt.h" +#include "typearith.h" /* Bignum: Chinese Remainder Theorem, extends the maximum transform length. */ @@ -175,5 +178,3 @@ crt3(mpd_uint_t *x1, mpd_uint_t *x2, mpd_uint_t *x3, mpd_size_t rsize) assert(carry[0] == 0 && carry[1] == 0 && carry[2] == 0); } - - diff --git a/Modules/_decimal/libmpdec/crt.h b/Modules/_decimal/libmpdec/crt.h index f61e77293632e..15a347d4cb31e 100644 --- a/Modules/_decimal/libmpdec/crt.h +++ b/Modules/_decimal/libmpdec/crt.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef CRT_H -#define CRT_H +#ifndef LIBMPDEC_CRT_H_ +#define LIBMPDEC_CRT_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -44,4 +43,4 @@ void crt3(mpd_uint_t *x1, mpd_uint_t *x2, mpd_uint_t *x3, mpd_size_t nmemb); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_CRT_H_ */ diff --git a/Modules/_decimal/libmpdec/difradix2.c b/Modules/_decimal/libmpdec/difradix2.c index 06e5ab5e222ee..049ecff65b6ee 100644 --- a/Modules/_decimal/libmpdec/difradix2.c +++ b/Modules/_decimal/libmpdec/difradix2.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,12 +27,14 @@ #include "mpdecimal.h" -#include + #include + #include "bits.h" +#include "constants.h" +#include "difradix2.h" #include "numbertheory.h" #include "umodarith.h" -#include "difradix2.h" /* Bignum: The actual transform routine (decimation in frequency). */ @@ -169,5 +171,3 @@ fnt_dif2(mpd_uint_t a[], mpd_size_t n, struct fnt_params *tparams) bitreverse_permute(a, n); } - - diff --git a/Modules/_decimal/libmpdec/difradix2.h b/Modules/_decimal/libmpdec/difradix2.h index 5e22bcf324fac..cdcbcf9a71043 100644 --- a/Modules/_decimal/libmpdec/difradix2.h +++ b/Modules/_decimal/libmpdec/difradix2.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef DIF_RADIX2_H -#define DIF_RADIX2_H +#ifndef LIBMPDEC_DIFRADIX2_H_ +#define LIBMPDEC_DIFRADIX2_H_ #include "mpdecimal.h" -#include #include "numbertheory.h" @@ -45,4 +44,4 @@ void fnt_dif2(mpd_uint_t a[], mpd_size_t n, struct fnt_params *tparams); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_DIFRADIX2_H_ */ diff --git a/Modules/_decimal/libmpdec/fnt.c b/Modules/_decimal/libmpdec/fnt.c index 7e924c85242b0..0dbe98fc71c9e 100644 --- a/Modules/_decimal/libmpdec/fnt.c +++ b/Modules/_decimal/libmpdec/fnt.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,13 +27,14 @@ #include "mpdecimal.h" -#include -#include + #include +#include + #include "bits.h" #include "difradix2.h" -#include "numbertheory.h" #include "fnt.h" +#include "numbertheory.h" /* Bignum: Fast transform for medium-sized coefficients. */ @@ -76,6 +77,3 @@ std_inv_fnt(mpd_uint_t *a, mpd_size_t n, int modnum) mpd_free(tparams); return 1; } - - - diff --git a/Modules/_decimal/libmpdec/fnt.h b/Modules/_decimal/libmpdec/fnt.h index fa2154a798d45..5222c476a3a4f 100644 --- a/Modules/_decimal/libmpdec/fnt.h +++ b/Modules/_decimal/libmpdec/fnt.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef FNT_H -#define FNT_H +#ifndef LIBMPDEC_FNT_H_ +#define LIBMPDEC_FNT_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -45,5 +44,4 @@ int std_inv_fnt(mpd_uint_t a[], mpd_size_t n, int modnum); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif - +#endif /* LIBMPDEC_FNT_H_ */ diff --git a/Modules/_decimal/libmpdec/fourstep.c b/Modules/_decimal/libmpdec/fourstep.c index 21d3e7485df4d..fb173ed5a52e4 100644 --- a/Modules/_decimal/libmpdec/fourstep.c +++ b/Modules/_decimal/libmpdec/fourstep.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,12 +27,14 @@ #include "mpdecimal.h" + #include + +#include "constants.h" +#include "fourstep.h" #include "numbertheory.h" #include "sixstep.h" -#include "transpose.h" #include "umodarith.h" -#include "fourstep.h" /* Bignum: Cache efficient Matrix Fourier Transform for arrays of the @@ -187,6 +189,7 @@ four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum) #if 0 /* An unordered transform is sufficient for convolution. */ /* Transpose the matrix. */ + #include "transpose.h" transpose_3xpow2(a, R, C); #endif @@ -217,6 +220,7 @@ inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum) #if 0 /* An unordered transform is sufficient for convolution. */ /* Transpose the matrix, producing an R*C matrix. */ + #include "transpose.h" transpose_3xpow2(a, C, R); #endif @@ -253,5 +257,3 @@ inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum) return 1; } - - diff --git a/Modules/_decimal/libmpdec/fourstep.h b/Modules/_decimal/libmpdec/fourstep.h index 80dcd4be3d59b..5ffb6fcc8ecd0 100644 --- a/Modules/_decimal/libmpdec/fourstep.h +++ b/Modules/_decimal/libmpdec/fourstep.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef FOUR_STEP_H -#define FOUR_STEP_H +#ifndef LIBMPDEC_FOURSTEP_H_ +#define LIBMPDEC_FOURSTEP_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -45,4 +44,4 @@ int inv_four_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_FOURSTEP_H_ */ diff --git a/Modules/_decimal/libmpdec/io.c b/Modules/_decimal/libmpdec/io.c index f45e558f1a957..9513a68e3782d 100644 --- a/Modules/_decimal/libmpdec/io.c +++ b/Modules/_decimal/libmpdec/io.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,16 +27,16 @@ #include "mpdecimal.h" -#include -#include -#include -#include -#include + #include +#include #include +#include #include -#include "bits.h" -#include "constants.h" +#include +#include +#include + #include "typearith.h" #include "io.h" @@ -277,7 +277,7 @@ mpd_qset_string(mpd_t *dec, const char *s, const mpd_context_t *ctx, } } - digits = end - coeff; + digits = end - coeff; if (dpoint) { size_t fracdigits = end-dpoint-1; if (dpoint > coeff) digits--; @@ -326,6 +326,22 @@ mpd_qset_string(mpd_t *dec, const char *s, const mpd_context_t *ctx, mpd_seterror(dec, MPD_Conversion_syntax, status); } +/* convert a character string to a decimal, use a maxcontext for conversion */ +void +mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status) +{ + mpd_context_t maxcontext; + + mpd_maxcontext(&maxcontext); + mpd_qset_string(dec, s, &maxcontext, status); + + if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) { + /* we want exact results */ + mpd_seterror(dec, MPD_Invalid_operation, status); + } + *status &= MPD_Errors; +} + /* Print word x with n decimal digits to string s. dot is either NULL or the location of a decimal point. */ #define EXTRACT_DIGIT(s, x, d, dot) \ @@ -539,8 +555,8 @@ _mpd_to_string(char **result, const mpd_t *dec, int flags, mpd_ssize_t dplace) dplace = -1 + mod_mpd_ssize_t(dec->exp+2, 3); } else { /* ldigits-1 is the adjusted exponent, which - * should be divisible by three. If not, move - * dplace one or two places to the right. */ + * should be divisible by three. If not, move + * dplace one or two places to the right. */ dplace += mod_mpd_ssize_t(ldigits-1, 3); } } @@ -1247,7 +1263,7 @@ mpd_qformat_spec(const mpd_t *dec, const mpd_spec_t *spec, } if (isupper((uchar)type)) { - type = tolower((uchar)type); + type = (char)tolower((uchar)type); flags |= MPD_FMT_UPPER; } if (spec->sign == ' ') { @@ -1265,6 +1281,7 @@ mpd_qformat_spec(const mpd_t *dec, const mpd_spec_t *spec, stackspec.align = '>'; spec = &stackspec; } + assert(strlen(spec->fill) == 1); /* annotation for scan-build */ if (type == '%') { flags |= MPD_FMT_PERCENT; } @@ -1579,5 +1596,3 @@ mpd_print(const mpd_t *dec) fputs("mpd_fprint: output error\n", stderr); /* GCOV_NOT_REACHED */ } } - - diff --git a/Modules/_decimal/libmpdec/io.h b/Modules/_decimal/libmpdec/io.h index de5486a00ca56..79d7c05ce369c 100644 --- a/Modules/_decimal/libmpdec/io.h +++ b/Modules/_decimal/libmpdec/io.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,17 +26,20 @@ */ -#ifndef IO_H -#define IO_H +#ifndef LIBMPDEC_IO_H_ +#define LIBMPDEC_IO_H_ -#include #include "mpdecimal.h" +#include + #if SIZE_MAX == MPD_SIZE_MAX #define mpd_strtossize _mpd_strtossize #else +#include + static inline mpd_ssize_t mpd_strtossize(const char *s, char **end, int base) { @@ -56,4 +59,4 @@ mpd_strtossize(const char *s, char **end, int base) #endif -#endif +#endif /* LIBMPDEC_IO_H_ */ diff --git a/Modules/_decimal/libmpdec/literature/fnt.py b/Modules/_decimal/libmpdec/literature/fnt.py index 6363536da6487..c1285a565db96 100644 --- a/Modules/_decimal/libmpdec/literature/fnt.py +++ b/Modules/_decimal/libmpdec/literature/fnt.py @@ -1,5 +1,5 @@ # -# Copyright (c) 2008-2016 Stefan Krah. All rights reserved. +# Copyright (c) 2008-2020 Stefan Krah. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions diff --git a/Modules/_decimal/libmpdec/literature/matrix-transform.txt b/Modules/_decimal/libmpdec/literature/matrix-transform.txt index 701d85d6b43c8..6e7ad7420909f 100644 --- a/Modules/_decimal/libmpdec/literature/matrix-transform.txt +++ b/Modules/_decimal/libmpdec/literature/matrix-transform.txt @@ -1,6 +1,6 @@ -(* Copyright (c) 2011 Stefan Krah. All rights reserved. *) +(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *) The Matrix Fourier Transform: diff --git a/Modules/_decimal/libmpdec/literature/mulmod-64.txt b/Modules/_decimal/libmpdec/literature/mulmod-64.txt index 029b8de3d7c92..fa967bf95e303 100644 --- a/Modules/_decimal/libmpdec/literature/mulmod-64.txt +++ b/Modules/_decimal/libmpdec/literature/mulmod-64.txt @@ -1,6 +1,6 @@ -(* Copyright (c) 2011 Stefan Krah. All rights reserved. *) +(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *) ========================================================================== diff --git a/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt b/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt index 4d17a928e6eae..ba804e4b4e786 100644 --- a/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt +++ b/Modules/_decimal/libmpdec/literature/mulmod-ppro.txt @@ -1,6 +1,6 @@ -(* Copyright (c) 2011 Stefan Krah. All rights reserved. *) +(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *) ======================================================================== diff --git a/Modules/_decimal/libmpdec/literature/six-step.txt b/Modules/_decimal/libmpdec/literature/six-step.txt index 8e45f48758478..852d5b0df8bf3 100644 --- a/Modules/_decimal/libmpdec/literature/six-step.txt +++ b/Modules/_decimal/libmpdec/literature/six-step.txt @@ -1,6 +1,6 @@ -(* Copyright (c) 2011 Stefan Krah. All rights reserved. *) +(* Copyright (c) 2011-2020 Stefan Krah. All rights reserved. *) The Six Step Transform: diff --git a/Modules/_decimal/libmpdec/literature/umodarith.lisp b/Modules/_decimal/libmpdec/literature/umodarith.lisp index 99d71c373d1ab..d71f074a26dcc 100644 --- a/Modules/_decimal/libmpdec/literature/umodarith.lisp +++ b/Modules/_decimal/libmpdec/literature/umodarith.lisp @@ -1,5 +1,5 @@ ; -; Copyright (c) 2008-2016 Stefan Krah. All rights reserved. +; Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
; ; Redistribution and use in source and binary forms, with or without ; modification, are permitted provided that the following conditions @@ -149,7 +149,7 @@ (defthmd addmod-correct (implies (and (< 0 m) (< m base) - (< a m) (<= b m) + (< a m) (<= b m) (natp m) (natp base) (natp a) (natp b)) (equal (addmod a b m base) @@ -179,7 +179,7 @@ (defthmd submod-correct (implies (and (< 0 m) (< m base) - (< a m) (<= b m) + (< a m) (<= b m) (natp m) (natp base) (natp a) (natp b)) (equal (submod a b m base) @@ -200,7 +200,7 @@ (defthm submod-2-correct (implies (and (< 0 m) (< m base) - (< a m) (<= b m) + (< a m) (<= b m) (natp m) (natp base) (natp a) (natp b)) (equal (submod-2 a b m base) @@ -231,7 +231,7 @@ (defthmd ext-submod-ext-submod-2-equal (implies (and (< 0 m) (< m base) - (< a (* 2 m)) (< b (* 2 m)) + (< a (* 2 m)) (< b (* 2 m)) (natp m) (natp base) (natp a) (natp b)) (equal (ext-submod a b m base) @@ -239,7 +239,7 @@ (defthmd ext-submod-2-correct (implies (and (< 0 m) (< m base) - (< a (* 2 m)) (< b (* 2 m)) + (< a (* 2 m)) (< b (* 2 m)) (natp m) (natp base) (natp a) (natp b)) (equal (ext-submod-2 a b m base) @@ -257,7 +257,7 @@ (defthmd dw-reduce-correct (implies (and (< 0 m) (< m base) - (< hi base) (< lo base) + (< hi base) (< lo base) (natp m) (natp base) (natp hi) (natp lo)) (equal (dw-reduce hi lo m base) @@ -322,7 +322,7 @@ (defthmd dw-submod-correct (implies (and (< 0 m) (< m base) (natp a) (< a m) - (< hi base) (< lo base) + (< hi base) (< lo base) (natp m) (natp base) (natp hi) (natp lo)) (equal (dw-submod a hi lo m base) diff --git a/Modules/_decimal/libmpdec/mpalloc.c b/Modules/_decimal/libmpdec/mpalloc.c index a854e09911bd3..eb5ee7a807b33 100644 --- a/Modules/_decimal/libmpdec/mpalloc.c +++ b/Modules/_decimal/libmpdec/mpalloc.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,10 +27,14 @@ #include "mpdecimal.h" + +#include #include #include -#include "typearith.h" +#include + #include "mpalloc.h" +#include "typearith.h" #if defined(_MSC_VER) @@ -294,4 +298,59 @@ mpd_realloc_dyn(mpd_t *result, mpd_ssize_t nwords, uint32_t *status) return 1; } +/* + * Input: 'result' is a static mpd_t with a static coefficient. + * Assumption: 'nwords' >= result->alloc. + * + * Resize the static coefficient to a larger dynamic one and copy the + * existing data. + * + * On failure the value of 'result' is unchanged. + */ +int +mpd_switch_to_dyn_cxx(mpd_t *result, mpd_ssize_t nwords) +{ + assert(nwords >= result->alloc); + + mpd_uint_t *data = mpd_alloc(nwords, sizeof *result->data); + if (data == NULL) { + return 0; + } + + memcpy(data, result->data, result->alloc * (sizeof *result->data)); + result->data = data; + result->alloc = nwords; + mpd_set_dynamic_data(result); + return 1; +} +/* + * Input: 'result' is a static or a dynamic mpd_t with a dynamic coefficient. + * Resize the coefficient to length 'nwords': + * Case nwords > result->alloc: + * If realloc is successful: + * 'result' has a larger coefficient but the same value. Return 1. + * Otherwise: + * 'result' has a the same coefficient. Return 0. + * Case nwords < result->alloc: + * If realloc is successful: + * 'result' has a smaller coefficient. result->len is undefined. Return 1. + * Otherwise (unlikely): + * 'result' is unchanged. Reuse the now oversized coefficient. Return 1. 
+ */ +int +mpd_realloc_dyn_cxx(mpd_t *result, mpd_ssize_t nwords) +{ + uint8_t err = 0; + + mpd_uint_t *p = mpd_realloc(result->data, nwords, sizeof *result->data, &err); + if (!err) { + result->data = p; + result->alloc = nwords; + } + else if (nwords > result->alloc) { + return 0; + } + + return 1; +} diff --git a/Modules/_decimal/libmpdec/mpalloc.h b/Modules/_decimal/libmpdec/mpalloc.h index efd711953a398..186808457b25c 100644 --- a/Modules/_decimal/libmpdec/mpalloc.h +++ b/Modules/_decimal/libmpdec/mpalloc.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,14 @@ */ -#ifndef MPALLOC_H -#define MPALLOC_H +#ifndef LIBMPDEC_MPALLOC_H_ +#define LIBMPDEC_MPALLOC_H_ #include "mpdecimal.h" +#include + /* Internal header file: all symbols have local scope in the DSO */ MPD_PRAGMA(MPD_HIDE_SYMBOLS_START) @@ -41,11 +43,11 @@ int mpd_switch_to_dyn(mpd_t *result, mpd_ssize_t size, uint32_t *status); int mpd_switch_to_dyn_zero(mpd_t *result, mpd_ssize_t size, uint32_t *status); int mpd_realloc_dyn(mpd_t *result, mpd_ssize_t size, uint32_t *status); - -MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ +int mpd_switch_to_dyn_cxx(mpd_t *result, mpd_ssize_t size); +int mpd_realloc_dyn_cxx(mpd_t *result, mpd_ssize_t size); -#endif - +MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ +#endif /* LIBMPDEC_MPALLOC_H_ */ diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c index 0986edb576a10..ad8db508b36f0 100644 --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,18 +27,21 @@ #include "mpdecimal.h" + +#include +#include +#include #include #include #include -#include -#include + #include "basearith.h" #include "bits.h" +#include "constants.h" #include "convolute.h" #include "crt.h" #include "mpalloc.h" #include "typearith.h" -#include "umodarith.h" #ifdef PPRO #if defined(_MSC_VER) @@ -241,7 +244,7 @@ mpd_lsd(mpd_uint_t word) } /* Coefficient size needed to store 'digits' */ -ALWAYS_INLINE mpd_ssize_t +mpd_ssize_t mpd_digits_to_size(mpd_ssize_t digits) { mpd_ssize_t q, r; @@ -260,8 +263,9 @@ mpd_exp_digits(mpd_ssize_t exp) /* Canonical */ ALWAYS_INLINE int -mpd_iscanonical(const mpd_t *dec UNUSED) +mpd_iscanonical(const mpd_t *dec) { + (void)dec; return 1; } @@ -512,6 +516,28 @@ mpd_qresize(mpd_t *result, mpd_ssize_t nwords, uint32_t *status) return mpd_realloc_dyn(result, nwords, status); } +/* Same as mpd_qresize, but do not set the result no NaN on failure. */ +static ALWAYS_INLINE int +mpd_qresize_cxx(mpd_t *result, mpd_ssize_t nwords) +{ + assert(!mpd_isconst_data(result)); /* illegal operation for a const */ + assert(!mpd_isshared_data(result)); /* illegal operation for a shared */ + assert(MPD_MINALLOC <= result->alloc); + + nwords = (nwords <= MPD_MINALLOC) ? 
MPD_MINALLOC : nwords; + if (nwords == result->alloc) { + return 1; + } + if (mpd_isstatic_data(result)) { + if (nwords > result->alloc) { + return mpd_switch_to_dyn_cxx(result, nwords); + } + return 1; + } + + return mpd_realloc_dyn_cxx(result, nwords); +} + /* Same as mpd_qresize, but the complete coefficient (including the old * memory area!) is initialized to zero. */ ALWAYS_INLINE int @@ -1192,7 +1218,7 @@ _c32setu64(mpd_t *result, uint64_t u, uint8_t sign, uint32_t *status) result->data[i] = w[i]; } - mpd_set_sign(result, sign); + mpd_set_flags(result, sign); result->exp = 0; result->len = len; mpd_setdigits(result); @@ -1244,6 +1270,26 @@ mpd_qset_i64(mpd_t *result, int64_t a, const mpd_context_t *ctx, #endif } +/* quietly set a decimal from an int64_t, use a maxcontext for conversion */ +void +mpd_qset_i64_exact(mpd_t *result, int64_t a, uint32_t *status) +{ + mpd_context_t maxcontext; + + mpd_maxcontext(&maxcontext); +#ifdef CONFIG_64 + mpd_qset_ssize(result, a, &maxcontext, status); +#else + _c32_qset_i64(result, a, &maxcontext, status); +#endif + + if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) { + /* we want exact results */ + mpd_seterror(result, MPD_Invalid_operation, status); + } + *status &= MPD_Errors; +} + /* quietly set a decimal from a uint64_t */ void mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx, @@ -1255,8 +1301,27 @@ mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx, _c32_qset_u64(result, a, ctx, status); #endif } -#endif /* !LEGACY_COMPILER */ +/* quietly set a decimal from a uint64_t, use a maxcontext for conversion */ +void +mpd_qset_u64_exact(mpd_t *result, uint64_t a, uint32_t *status) +{ + mpd_context_t maxcontext; + + mpd_maxcontext(&maxcontext); +#ifdef CONFIG_64 + mpd_qset_uint(result, a, &maxcontext, status); +#else + _c32_qset_u64(result, a, &maxcontext, status); +#endif + + if (*status & (MPD_Inexact|MPD_Rounded|MPD_Clamped)) { + /* we want exact results */ + mpd_seterror(result, MPD_Invalid_operation, status); + } + *status &= MPD_Errors; +} +#endif /* !LEGACY_COMPILER */ /* * Quietly get an mpd_uint_t from a decimal. 
Assumes @@ -1345,11 +1410,13 @@ mpd_qabs_uint(const mpd_t *a, uint32_t *status) mpd_ssize_t mpd_qget_ssize(const mpd_t *a, uint32_t *status) { + uint32_t workstatus = 0; mpd_uint_t u; int isneg; - u = mpd_qabs_uint(a, status); - if (*status&MPD_Invalid_operation) { + u = mpd_qabs_uint(a, &workstatus); + if (workstatus&MPD_Invalid_operation) { + *status |= workstatus; return MPD_SSIZE_MAX; } @@ -1469,9 +1536,11 @@ mpd_qget_i64(const mpd_t *a, uint32_t *status) uint32_t mpd_qget_u32(const mpd_t *a, uint32_t *status) { - uint64_t x = mpd_qget_uint(a, status); + uint32_t workstatus = 0; + uint64_t x = mpd_qget_uint(a, &workstatus); - if (*status&MPD_Invalid_operation) { + if (workstatus&MPD_Invalid_operation) { + *status |= workstatus; return UINT32_MAX; } if (x > UINT32_MAX) { @@ -1486,9 +1555,11 @@ mpd_qget_u32(const mpd_t *a, uint32_t *status) int32_t mpd_qget_i32(const mpd_t *a, uint32_t *status) { - int64_t x = mpd_qget_ssize(a, status); + uint32_t workstatus = 0; + int64_t x = mpd_qget_ssize(a, &workstatus); - if (*status&MPD_Invalid_operation) { + if (workstatus&MPD_Invalid_operation) { + *status |= workstatus; return INT32_MAX; } if (x < INT32_MIN || x > INT32_MAX) { @@ -1504,14 +1575,20 @@ mpd_qget_i32(const mpd_t *a, uint32_t *status) uint64_t mpd_qget_u64(const mpd_t *a, uint32_t *status) { - return _c32_qget_u64(1, a, status); + uint32_t workstatus = 0; + uint64_t x = _c32_qget_u64(1, a, &workstatus); + *status |= workstatus; + return x; } /* quietly get an int64_t from a decimal */ int64_t mpd_qget_i64(const mpd_t *a, uint32_t *status) { - return _c32_qget_i64(a, status); + uint32_t workstatus = 0; + int64_t x = _c32_qget_i64(a, &workstatus); + *status |= workstatus; + return x; } #endif @@ -1937,6 +2014,25 @@ mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status) return 1; } +/* Same as mpd_qcopy, but do not set the result to NaN on failure. */ +int +mpd_qcopy_cxx(mpd_t *result, const mpd_t *a) +{ + if (result == a) return 1; + + if (!mpd_qresize_cxx(result, a->len)) { + return 0; + } + + mpd_copy_flags(result, a); + result->exp = a->exp; + result->digits = a->digits; + result->len = a->len; + memcpy(result->data, a->data, a->len * (sizeof *result->data)); + + return 1; +} + /* * Copy to a decimal with a static buffer. The caller has to make sure that * the buffer is big enough. Cannot fail. @@ -3780,11 +3876,31 @@ void mpd_qdiv(mpd_t *q, const mpd_t *a, const mpd_t *b, const mpd_context_t *ctx, uint32_t *status) { - _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, status); + MPD_NEW_STATIC(aa,0,0,0,0); + MPD_NEW_STATIC(bb,0,0,0,0); + uint32_t xstatus = 0; - if (*status & MPD_Malloc_error) { + if (q == a) { + if (!mpd_qcopy(&aa, a, status)) { + mpd_seterror(q, MPD_Malloc_error, status); + goto out; + } + a = &aa; + } + + if (q == b) { + if (!mpd_qcopy(&bb, b, status)) { + mpd_seterror(q, MPD_Malloc_error, status); + goto out; + } + b = &bb; + } + + _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, &xstatus); + + if (xstatus & (MPD_Malloc_error|MPD_Division_impossible)) { /* Inexact quotients (the usual case) fill the entire context precision, - * which can lead to malloc() failures for very high precisions. Retry + * which can lead to the above errors for very high precisions. Retry * the operation with a lower precision in case the result is exact. 
* * We need an upper bound for the number of digits of a_coeff / b_coeff @@ -3799,25 +3915,33 @@ mpd_qdiv(mpd_t *q, const mpd_t *a, const mpd_t *b, * We arrive at a total upper bound: * * maxdigits(a_coeff') + maxdigits(1 / b_coeff') <= - * a->digits + log2(b_coeff) = - * a->digits + log10(b_coeff) / log10(2) <= + * log10(a_coeff) + log2(b_coeff) = + * log10(a_coeff) + log10(b_coeff) / log10(2) <= * a->digits + b->digits * 4; */ - uint32_t workstatus = 0; mpd_context_t workctx = *ctx; + uint32_t ystatus = 0; + workctx.prec = a->digits + b->digits * 4; if (workctx.prec >= ctx->prec) { - return; /* No point in retrying, keep the original error. */ + *status |= (xstatus&MPD_Errors); + goto out; /* No point in retrying, keep the original error. */ } - _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &workstatus); - if (workstatus == 0) { /* The result is exact, unrounded, normal etc. */ - *status = 0; - return; + _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &ystatus); + if (ystatus != 0) { + ystatus = *status | ((ystatus|xstatus)&MPD_Errors); + mpd_seterror(q, ystatus, status); } - - mpd_seterror(q, *status, status); } + else { + *status |= xstatus; + } + + +out: + mpd_del(&aa); + mpd_del(&bb); } /* Internal function. */ @@ -3907,6 +4031,7 @@ _mpd_qdivmod(mpd_t *q, mpd_t *r, const mpd_t *a, const mpd_t *b, } if (b->len == 1) { + assert(b->data[0] != 0); /* annotation for scan-build */ if (a->len == 1) { _mpd_div_word(&q->data[0], &r->data[0], a->data[0], b->data[0]); } @@ -6251,9 +6376,11 @@ _mpd_qpow_int(mpd_t *result, const mpd_t *base, const mpd_t *exp, workctx.round = MPD_ROUND_HALF_EVEN; workctx.clamp = 0; if (mpd_isnegative(exp)) { + uint32_t workstatus = 0; workctx.prec += 1; - mpd_qdiv(&tbase, &one, base, &workctx, status); - if (*status&MPD_Errors) { + mpd_qdiv(&tbase, &one, base, &workctx, &workstatus); + *status |= workstatus; + if (workstatus&MPD_Errors) { mpd_setspecial(result, MPD_POS, MPD_NAN); goto finish; } @@ -6988,6 +7115,8 @@ mpd_qrem_near(mpd_t *r, const mpd_t *a, const mpd_t *b, mpd_ssize_t expdiff, qdigits; int cmp, isodd, allnine; + assert(r != NULL); /* annotation for scan-build */ + if (mpd_isspecial(a) || mpd_isspecial(b)) { if (mpd_qcheck_nans(r, a, b, ctx, status)) { return; @@ -7218,6 +7347,11 @@ void mpd_qtrunc(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { + if (mpd_isspecial(a)) { + mpd_seterror(result, MPD_Invalid_operation, status); + return; + } + (void)_mpd_qround_to_integral(TO_INT_TRUNC, result, a, ctx, status); } @@ -7226,6 +7360,12 @@ mpd_qfloor(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { mpd_context_t workctx = *ctx; + + if (mpd_isspecial(a)) { + mpd_seterror(result, MPD_Invalid_operation, status); + return; + } + workctx.round = MPD_ROUND_FLOOR; (void)_mpd_qround_to_integral(TO_INT_SILENT, result, a, &workctx, status); @@ -7236,6 +7376,12 @@ mpd_qceil(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { mpd_context_t workctx = *ctx; + + if (mpd_isspecial(a)) { + mpd_seterror(result, MPD_Invalid_operation, status); + return; + } + workctx.round = MPD_ROUND_CEILING; (void)_mpd_qround_to_integral(TO_INT_SILENT, result, a, &workctx, status); @@ -7877,9 +8023,20 @@ void mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, uint32_t *status) { - _mpd_qsqrt(result, a, ctx, status); + MPD_NEW_STATIC(aa,0,0,0,0); + uint32_t xstatus = 0; + + if (result == a) { + if (!mpd_qcopy(&aa, a, status)) { + mpd_seterror(result, MPD_Malloc_error, status); + goto out; + } + a = 
&aa; + } + + _mpd_qsqrt(result, a, ctx, &xstatus); - if (*status & (MPD_Malloc_error|MPD_Division_impossible)) { + if (xstatus & (MPD_Malloc_error|MPD_Division_impossible)) { /* The above conditions can occur at very high context precisions * if intermediate values get too large. Retry the operation with * a lower context precision in case the result is exact. @@ -7889,22 +8046,27 @@ mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, * * NOTE: sqrt(40e9) = 2.0e+5 /\ digits(40e9) = digits(2.0e+5) = 2 */ - uint32_t workstatus = 0; + uint32_t ystatus = 0; mpd_context_t workctx = *ctx; - workctx.prec = a->digits; + workctx.prec = a->digits; if (workctx.prec >= ctx->prec) { - return; /* No point in repeating this, keep the original error. */ + *status |= (xstatus|MPD_Errors); + goto out; /* No point in repeating this, keep the original error. */ } - _mpd_qsqrt(result, a, &workctx, &workstatus); - if (workstatus == 0) { - *status = 0; - return; + _mpd_qsqrt(result, a, &workctx, &ystatus); + if (ystatus != 0) { + ystatus = *status | ((xstatus|ystatus)&MPD_Errors); + mpd_seterror(result, ystatus, status); } - - mpd_seterror(result, *status, status); } + else { + *status |= xstatus; + } + +out: + mpd_del(&aa); } @@ -7918,6 +8080,7 @@ mpd_sizeinbase(const mpd_t *a, uint32_t base) { double x; size_t digits; + double upper_bound; assert(mpd_isinteger(a)); assert(base >= 2); @@ -7934,10 +8097,14 @@ mpd_sizeinbase(const mpd_t *a, uint32_t base) if (digits > 2711437152599294ULL) { return SIZE_MAX; } + + upper_bound = (double)((1ULL<<53)-1); +#else + upper_bound = (double)(SIZE_MAX-1); #endif x = (double)digits / log10(base); - return (x > SIZE_MAX-1) ? SIZE_MAX : (size_t)x + 1; + return (x > upper_bound) ? SIZE_MAX : (size_t)x + 1; } /* Space needed to import a base 'base' integer of length 'srclen'. */ @@ -7945,6 +8112,7 @@ static mpd_ssize_t _mpd_importsize(size_t srclen, uint32_t base) { double x; + double upper_bound; assert(srclen > 0); assert(base >= 2); @@ -7953,10 +8121,15 @@ _mpd_importsize(size_t srclen, uint32_t base) if (srclen > (1ULL<<53)) { return MPD_SSIZE_MAX; } + + assert((1ULL<<53) <= MPD_MAXIMPORT); + upper_bound = (double)((1ULL<<53)-1); +#else + upper_bound = MPD_MAXIMPORT-1; #endif x = (double)srclen * (log10(base)/MPD_RDIGITS); - return (x >= MPD_MAXIMPORT) ? MPD_SSIZE_MAX : (mpd_ssize_t)x + 1; + return (x > upper_bound) ? MPD_SSIZE_MAX : (mpd_ssize_t)x + 1; } static uint8_t @@ -8483,6 +8656,3 @@ mpd_qimport_u32(mpd_t *result, mpd_qresize(result, result->len, status); mpd_qfinalize(result, ctx, status); } - - - diff --git a/Modules/_decimal/libmpdec/mpdecimal.h b/Modules/_decimal/libmpdec/mpdecimal.h index a67dd9bc126c2..108b76efa8594 100644 --- a/Modules/_decimal/libmpdec/mpdecimal.h +++ b/Modules/_decimal/libmpdec/mpdecimal.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,59 +26,51 @@ */ -#ifndef MPDECIMAL_H -#define MPDECIMAL_H +#ifndef LIBMPDEC_MPDECIMAL_H_ +#define LIBMPDEC_MPDECIMAL_H_ +#ifndef _MSC_VER + #include "pyconfig.h" +#endif + #ifdef __cplusplus + #include + #include + #include + #include + #include extern "C" { - #ifndef __STDC_LIMIT_MACROS - #define __STDC_LIMIT_MACROS - #define MPD_CLEAR_STDC_LIMIT_MACROS - #endif +#else + #include + #include + #include + #include + #include #endif -#ifndef _MSC_VER - #include "pyconfig.h" +#if (defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)) && \ + defined(__GNUC__) && __GNUC__ >= 4 && !defined(__INTEL_COMPILER) + #define MPD_PRAGMA(x) _Pragma(x) + #define MPD_HIDE_SYMBOLS_START "GCC visibility push(hidden)" + #define MPD_HIDE_SYMBOLS_END "GCC visibility pop" +#else + #define MPD_PRAGMA(x) + #define MPD_HIDE_SYMBOLS_START + #define MPD_HIDE_SYMBOLS_END #endif -#include -#include -#include -#include -#include -#include -#include +#if defined(__GNUC__) && !defined(__INTEL_COMPILER) + #define UNUSED __attribute__((unused)) +#else + #define UNUSED +#endif -#ifdef _MSC_VER +#if defined(_MSC_VER) #include "vccompat.h" - #ifndef UNUSED - #define UNUSED - #endif - #define MPD_PRAGMA(x) - #define MPD_HIDE_SYMBOLS_START - #define MPD_HIDE_SYMBOLS_END #define EXTINLINE extern inline #else - #ifndef __GNUC_STDC_INLINE__ - #define __GNUC_STDC_INLINE__ 1 - #endif - #if defined(__GNUC__) && !defined(__INTEL_COMPILER) - #define UNUSED __attribute__((unused)) - #else - #define UNUSED - #endif - #if (defined(__linux__) || defined(__FreeBSD__) || defined(__APPLE__)) && \ - defined(__GNUC__) && __GNUC__ >= 4 && !defined(__INTEL_COMPILER) - #define MPD_PRAGMA(x) _Pragma(x) - #define MPD_HIDE_SYMBOLS_START "GCC visibility push(hidden)" - #define MPD_HIDE_SYMBOLS_END "GCC visibility pop" - #else - #define MPD_PRAGMA(x) - #define MPD_HIDE_SYMBOLS_START - #define MPD_HIDE_SYMBOLS_END - #endif #define EXTINLINE #endif @@ -103,10 +95,10 @@ MPD_PRAGMA(MPD_HIDE_SYMBOLS_START) /******************************************************************************/ #define MPD_MAJOR_VERSION 2 -#define MPD_MINOR_VERSION 4 -#define MPD_MICRO_VERSION 2 +#define MPD_MINOR_VERSION 5 +#define MPD_MICRO_VERSION 0 -#define MPD_VERSION "2.4.2" +#define MPD_VERSION "2.5.0" #define MPD_VERSION_HEX ((MPD_MAJOR_VERSION << 24) | \ (MPD_MINOR_VERSION << 16) | \ @@ -423,6 +415,7 @@ void mpd_print(const mpd_t *dec); /* assignment from a string */ void mpd_qset_string(mpd_t *dec, const char *s, const mpd_context_t *ctx, uint32_t *status); +void mpd_qset_string_exact(mpd_t *dec, const char *s, uint32_t *status); /* set to NaN with error flags */ void mpd_seterror(mpd_t *result, uint32_t flags, uint32_t *status); @@ -440,6 +433,8 @@ void mpd_qset_u32(mpd_t *result, uint32_t a, const mpd_context_t *ctx, uint32_t #ifndef LEGACY_COMPILER void mpd_qset_i64(mpd_t *result, int64_t a, const mpd_context_t *ctx, uint32_t *status); void mpd_qset_u64(mpd_t *result, uint64_t a, const mpd_context_t *ctx, uint32_t *status); +void mpd_qset_i64_exact(mpd_t *result, int64_t a, uint32_t *status); +void mpd_qset_u64_exact(mpd_t *result, uint64_t a, uint32_t *status); #endif /* quietly assign a C integer type to an mpd_t with a static coefficient */ @@ -467,7 +462,8 @@ void mpd_qfinalize(mpd_t *result, const mpd_context_t *ctx, uint32_t *status); const char *mpd_class(const mpd_t *a, const mpd_context_t *ctx); -int 
mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status); +int mpd_qcopy(mpd_t *result, const mpd_t *a, uint32_t *status); +int mpd_qcopy_cxx(mpd_t *result, const mpd_t *a); mpd_t *mpd_qncopy(const mpd_t *a); int mpd_qcopy_abs(mpd_t *result, const mpd_t *a, uint32_t *status); int mpd_qcopy_negate(mpd_t *result, const mpd_t *a, uint32_t *status); @@ -721,7 +717,7 @@ EXTINLINE mpd_uint_t mpd_lsd(mpd_uint_t word); EXTINLINE mpd_ssize_t mpd_digits_to_size(mpd_ssize_t digits); /* number of digits in the exponent, undefined for MPD_SSIZE_MIN */ EXTINLINE int mpd_exp_digits(mpd_ssize_t exp); -EXTINLINE int mpd_iscanonical(const mpd_t *dec UNUSED); +EXTINLINE int mpd_iscanonical(const mpd_t *dec); EXTINLINE int mpd_isfinite(const mpd_t *dec); EXTINLINE int mpd_isinfinite(const mpd_t *dec); EXTINLINE int mpd_isinteger(const mpd_t *dec); @@ -833,15 +829,8 @@ MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ #ifdef __cplusplus - #ifdef MPD_CLEAR_STDC_LIMIT_MACROS - #undef MPD_CLEAR_STDC_LIMIT_MACROS - #undef __STDC_LIMIT_MACROS - #endif } /* END extern "C" */ #endif -#endif /* MPDECIMAL_H */ - - - +#endif /* LIBMPDEC_MPDECIMAL_H_ */ diff --git a/Modules/_decimal/libmpdec/numbertheory.c b/Modules/_decimal/libmpdec/numbertheory.c index 4e035477e2800..210e0deb37120 100644 --- a/Modules/_decimal/libmpdec/numbertheory.c +++ b/Modules/_decimal/libmpdec/numbertheory.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,11 +27,13 @@ #include "mpdecimal.h" -#include + #include +#include + #include "bits.h" -#include "umodarith.h" #include "numbertheory.h" +#include "umodarith.h" /* Bignum: Initialize the Number Theoretic Transform. */ @@ -128,5 +130,3 @@ _mpd_init_w3table(mpd_uint_t w3table[3], int sign, int modnum) w3table[1] = kernel; w3table[2] = POWMOD(kernel, 2); } - - diff --git a/Modules/_decimal/libmpdec/numbertheory.h b/Modules/_decimal/libmpdec/numbertheory.h index e94c157910c83..47b7753b831b8 100644 --- a/Modules/_decimal/libmpdec/numbertheory.h +++ b/Modules/_decimal/libmpdec/numbertheory.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,12 @@ */ -#ifndef NUMBER_THEORY_H -#define NUMBER_THEORY_H +#ifndef LIBMPDEC_NUMBERTHEORY_H_ +#define LIBMPDEC_NUMBERTHEORY_H_ -#include "constants.h" #include "mpdecimal.h" +#include "constants.h" /* Internal header file: all symbols have local scope in the DSO */ @@ -73,6 +73,4 @@ std_setmodulus(int modnum, mpd_uint_t *umod) MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif - - +#endif /* LIBMPDEC_NUMBERTHEORY_H_ */ diff --git a/Modules/_decimal/libmpdec/sixstep.c b/Modules/_decimal/libmpdec/sixstep.c index 92d513ebe1828..a4d1dbed7813c 100644 --- a/Modules/_decimal/libmpdec/sixstep.c +++ b/Modules/_decimal/libmpdec/sixstep.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. 
* * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,15 +27,17 @@ #include "mpdecimal.h" -#include -#include + #include +#include + #include "bits.h" +#include "constants.h" #include "difradix2.h" #include "numbertheory.h" +#include "sixstep.h" #include "transpose.h" #include "umodarith.h" -#include "sixstep.h" /* Bignum: Cache efficient Matrix Fourier Transform for arrays of the @@ -210,5 +212,3 @@ inv_six_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum) return 1; } - - diff --git a/Modules/_decimal/libmpdec/sixstep.h b/Modules/_decimal/libmpdec/sixstep.h index 4a8b015e3a9b9..89b4a33afc792 100644 --- a/Modules/_decimal/libmpdec/sixstep.h +++ b/Modules/_decimal/libmpdec/sixstep.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef SIX_STEP_H -#define SIX_STEP_H +#ifndef LIBMPDEC_SIXSTEP_H_ +#define LIBMPDEC_SIXSTEP_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -45,4 +44,4 @@ int inv_six_step_fnt(mpd_uint_t *a, mpd_size_t n, int modnum); MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_SIXSTEP_H_ */ diff --git a/Modules/_decimal/libmpdec/transpose.c b/Modules/_decimal/libmpdec/transpose.c index 55d6d89922790..56321b5f39a73 100644 --- a/Modules/_decimal/libmpdec/transpose.c +++ b/Modules/_decimal/libmpdec/transpose.c @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -27,15 +27,17 @@ #include "mpdecimal.h" + +#include +#include #include #include #include -#include -#include + #include "bits.h" #include "constants.h" -#include "typearith.h" #include "transpose.h" +#include "typearith.h" #define BUFSIZE 4096 @@ -272,5 +274,3 @@ transpose_pow2(mpd_uint_t *matrix, mpd_size_t rows, mpd_size_t cols) return 1; } - - diff --git a/Modules/_decimal/libmpdec/transpose.h b/Modules/_decimal/libmpdec/transpose.h index e1cd1fa17dd77..e91c18d74356b 100644 --- a/Modules/_decimal/libmpdec/transpose.h +++ b/Modules/_decimal/libmpdec/transpose.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,11 @@ */ -#ifndef TRANSPOSE_H -#define TRANSPOSE_H +#ifndef LIBMPDEC_TRANSPOSE_H_ +#define LIBMPDEC_TRANSPOSE_H_ #include "mpdecimal.h" -#include /* Internal header file: all symbols have local scope in the DSO */ @@ -59,4 +58,4 @@ static inline void pointerswap(mpd_uint_t **a, mpd_uint_t **b) MPD_PRAGMA(MPD_HIDE_SYMBOLS_END) /* restore previous scope rules */ -#endif +#endif /* LIBMPDEC_TRANSPOSE_H_ */ diff --git a/Modules/_decimal/libmpdec/typearith.h b/Modules/_decimal/libmpdec/typearith.h index 405237dac516a..47961788d7641 100644 --- a/Modules/_decimal/libmpdec/typearith.h +++ b/Modules/_decimal/libmpdec/typearith.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. 
All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,14 @@ */ -#ifndef TYPEARITH_H -#define TYPEARITH_H +#ifndef LIBMPDEC_TYPEARITH_H_ +#define LIBMPDEC_TYPEARITH_H_ #include "mpdecimal.h" +#include + /*****************************************************************************/ /* Low level native arithmetic on basic types */ @@ -663,7 +665,4 @@ mulmod_size_t(mpd_size_t a, mpd_size_t b, mpd_size_t m) } -#endif /* TYPEARITH_H */ - - - +#endif /* LIBMPDEC_TYPEARITH_H_ */ diff --git a/Modules/_decimal/libmpdec/umodarith.h b/Modules/_decimal/libmpdec/umodarith.h index 68d15188cb39e..d7dbbbe6a7331 100644 --- a/Modules/_decimal/libmpdec/umodarith.h +++ b/Modules/_decimal/libmpdec/umodarith.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,12 +26,13 @@ */ -#ifndef UMODARITH_H -#define UMODARITH_H +#ifndef LIBMPDEC_UMODARITH_H_ +#define LIBMPDEC_UMODARITH_H_ -#include "constants.h" #include "mpdecimal.h" + +#include "constants.h" #include "typearith.h" @@ -644,7 +645,4 @@ ppro_powmod(mpd_uint_t base, mpd_uint_t exp, double *dmod, uint32_t *dinvmod) #endif /* CONFIG_32 */ -#endif /* UMODARITH_H */ - - - +#endif /* LIBMPDEC_UMODARITH_H_ */ diff --git a/Modules/_decimal/libmpdec/vccompat.h b/Modules/_decimal/libmpdec/vccompat.h index 2ba805dcc5646..e2e1c42cc0250 100644 --- a/Modules/_decimal/libmpdec/vccompat.h +++ b/Modules/_decimal/libmpdec/vccompat.h @@ -1,5 +1,5 @@ /* - * Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + * Copyright (c) 2008-2020 Stefan Krah. All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions @@ -26,14 +26,16 @@ */ -#ifndef VCCOMPAT_H -#define VCCOMPAT_H +#ifndef LIBMPDEC_VCCOMPAT_H_ +#define LIBMPDEC_VCCOMPAT_H_ /* Visual C fixes: no snprintf ... */ #ifdef _MSC_VER - #undef inline - #define inline __inline + #ifndef __cplusplus + #undef inline + #define inline __inline + #endif #undef random #define random rand #undef srandom @@ -51,7 +53,4 @@ #endif -#endif /* VCCOMPAT_H */ - - - +#endif /* LIBMPDEC_VCCOMPAT_H_ */ diff --git a/Modules/_decimal/libmpdec/vcdiv64.asm b/Modules/_decimal/libmpdec/vcdiv64.asm index 6b6645673ab5a..597e9ba9352c8 100644 --- a/Modules/_decimal/libmpdec/vcdiv64.asm +++ b/Modules/_decimal/libmpdec/vcdiv64.asm @@ -1,5 +1,5 @@ ; -; Copyright (c) 2008-2016 Stefan Krah. All rights reserved. +; Copyright (c) 2008-2020 Stefan Krah. All rights reserved. ; ; Redistribution and use in source and binary forms, with or without ; modification, are permitted provided that the following conditions @@ -44,5 +44,3 @@ _mpd_div_words PROC _mpd_div_words ENDP _TEXT ENDS END - - From webhook-mailer at python.org Fri Jun 5 16:01:27 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Fri, 05 Jun 2020 20:01:27 -0000 Subject: [Python-checkins] Fix missing FloatOperation in EXTRA_FUNCTIONALITY path. 
(#20655) Message-ID: https://github.com/python/cpython/commit/5fe1df1886e2e53b04bf76ef916857271d3c8f20 commit: 5fe1df1886e2e53b04bf76ef916857271d3c8f20 branch: master author: Stefan Krah committer: GitHub date: 2020-06-05T22:01:18+02:00 summary: Fix missing FloatOperation in EXTRA_FUNCTIONALITY path. (#20655) files: M Lib/test/test_decimal.py diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index f1abd2aecb122..ed483a4709527 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -5201,6 +5201,7 @@ def test_c_signal_dict(self): DefaultContext = C.DefaultContext InvalidOperation = C.InvalidOperation + FloatOperation = C.FloatOperation DivisionByZero = C.DivisionByZero Overflow = C.Overflow Subnormal = C.Subnormal @@ -5274,6 +5275,7 @@ def assertIsExclusivelySet(signal, signal_dict): Underflow: C.DecUnderflow, Overflow: C.DecOverflow, DivisionByZero: C.DecDivisionByZero, + FloatOperation: C.DecFloatOperation, InvalidOperation: C.DecIEEEInvalidOperation } IntCond = [ From webhook-mailer at python.org Fri Jun 5 16:34:31 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Fri, 05 Jun 2020 20:34:31 -0000 Subject: [Python-checkins] bpo-39791: Refresh importlib.metadata from importlib_metadata 1.6.1. (GH-20659) Message-ID: https://github.com/python/cpython/commit/161541ab45278df6603dd870113b10f13e4d9e16 commit: 161541ab45278df6603dd870113b10f13e4d9e16 branch: master author: Jason R. Coombs committer: GitHub date: 2020-06-05T16:34:16-04:00 summary: bpo-39791: Refresh importlib.metadata from importlib_metadata 1.6.1. (GH-20659) * Refresh importlib.metadata from importlib_metadata 1.6.1. * ?? Added by blurb_it. Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Misc/NEWS.d/next/Library/2020-06-05-19-29-10.bpo-39791._CcO3d.rst M Doc/library/importlib.metadata.rst M Lib/importlib/metadata.py M Lib/test/test_importlib/fixtures.py M Lib/test/test_importlib/test_main.py M Lib/test/test_importlib/test_zip.py diff --git a/Doc/library/importlib.metadata.rst b/Doc/library/importlib.metadata.rst index 15e58b860d97d..21da143f3bebf 100644 --- a/Doc/library/importlib.metadata.rst +++ b/Doc/library/importlib.metadata.rst @@ -77,7 +77,9 @@ Entry points The ``entry_points()`` function returns a dictionary of all entry points, keyed by group. Entry points are represented by ``EntryPoint`` instances; each ``EntryPoint`` has a ``.name``, ``.group``, and ``.value`` attributes and -a ``.load()`` method to resolve the value. +a ``.load()`` method to resolve the value. There are also ``.module``, +``.attr``, and ``.extras`` attributes for getting the components of the +``.value`` attribute:: >>> eps = entry_points() # doctest: +SKIP >>> list(eps) # doctest: +SKIP @@ -86,6 +88,12 @@ a ``.load()`` method to resolve the value. >>> wheel = [ep for ep in scripts if ep.name == 'wheel'][0] # doctest: +SKIP >>> wheel # doctest: +SKIP EntryPoint(name='wheel', value='wheel.cli:main', group='console_scripts') + >>> wheel.module # doctest: +SKIP + 'wheel.cli' + >>> wheel.attr # doctest: +SKIP + 'main' + >>> wheel.extras # doctest: +SKIP + [] >>> main = wheel.load() # doctest: +SKIP >>> main # doctest: +SKIP @@ -94,7 +102,7 @@ The ``group`` and ``name`` are arbitrary values defined by the package author and usually a client will wish to resolve all entry points for a particular group. Read `the setuptools docs `_ -for more information on entrypoints, their definition, and usage. +for more information on entry points, their definition, and usage. .. 
_metadata: @@ -235,7 +243,7 @@ method:: """ The ``DistributionFinder.Context`` object provides ``.path`` and ``.name`` -properties indicating the path to search and names to match and may +properties indicating the path to search and name to match and may supply other relevant context. What this means in practice is that to support finding distribution package diff --git a/Lib/importlib/metadata.py b/Lib/importlib/metadata.py index 831f593277ccd..ffa0cba45706d 100644 --- a/Lib/importlib/metadata.py +++ b/Lib/importlib/metadata.py @@ -78,6 +78,16 @@ def load(self): attrs = filter(None, (match.group('attr') or '').split('.')) return functools.reduce(getattr, attrs, module) + @property + def module(self): + match = self.pattern.match(self.value) + return match.group('module') + + @property + def attr(self): + match = self.pattern.match(self.value) + return match.group('attr') + @property def extras(self): match = self.pattern.match(self.value) @@ -170,7 +180,7 @@ def from_name(cls, name): """ for resolver in cls._discover_resolvers(): dists = resolver(DistributionFinder.Context(name=name)) - dist = next(dists, None) + dist = next(iter(dists), None) if dist is not None: return dist else: @@ -213,6 +223,17 @@ def _discover_resolvers(): ) return filter(None, declared) + @classmethod + def _local(cls, root='.'): + from pep517 import build, meta + system = build.compat_system(root) + builder = functools.partial( + meta.build, + source_dir=root, + system=system, + ) + return PathDistribution(zipfile.Path(meta.build_as_zip(builder))) + @property def metadata(self): """Return the parsed metadata for this Distribution. @@ -391,7 +412,7 @@ class FastPath: def __init__(self, root): self.root = root - self.base = os.path.basename(root).lower() + self.base = os.path.basename(self.root).lower() def joinpath(self, child): return pathlib.Path(self.root, child) @@ -408,8 +429,8 @@ def zip_children(self): names = zip_path.root.namelist() self.joinpath = zip_path.joinpath - return ( - posixpath.split(child)[0] + return dict.fromkeys( + child.split(posixpath.sep, 1)[0] for child in names ) @@ -475,7 +496,6 @@ def _search_paths(cls, name, paths): ) - class PathDistribution(Distribution): def __init__(self, path): """Construct a distribution from a path to the metadata directory. 
diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index d923cec26ea8f..b25febb7fe756 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -161,6 +161,21 @@ def setUp(self): build_files(EggInfoFile.files, prefix=self.site_dir) +class LocalPackage: + files = { + "setup.py": """ + import setuptools + setuptools.setup(name="local-pkg", version="2.0.1") + """, + } + + def setUp(self): + self.fixtures = contextlib.ExitStack() + self.addCleanup(self.fixtures.close) + self.fixtures.enter_context(tempdir_as_cwd()) + build_files(self.files) + + def build_files(file_defs, prefix=pathlib.Path()): """Build a set of files/directories, as described by the diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 42a79992ecc8c..7b18c3de16eea 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -246,3 +246,19 @@ def test_json_dump(self): """ with self.assertRaises(Exception): json.dumps(self.ep) + + def test_module(self): + assert self.ep.module == 'value' + + def test_attr(self): + assert self.ep.attr is None + + +class FileSystem(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): + def test_unicode_dir_on_sys_path(self): + """ + Ensure a Unicode subdirectory of a directory on sys.path + does not crash. + """ + fixtures.build_files({'?': {}}, prefix=self.site_dir) + list(distributions()) diff --git a/Lib/test/test_importlib/test_zip.py b/Lib/test/test_importlib/test_zip.py index fa87cd7cb1096..a5399c16682fb 100644 --- a/Lib/test/test_importlib/test_zip.py +++ b/Lib/test/test_importlib/test_zip.py @@ -3,9 +3,10 @@ from contextlib import ExitStack from importlib.metadata import ( - distribution, entry_points, files, PackageNotFoundError, version, + distribution, entry_points, files, PackageNotFoundError, + version, distributions, ) -from importlib.resources import path +from importlib import resources from test.support import requires_zlib @@ -14,15 +15,19 @@ class TestZip(unittest.TestCase): root = 'test.test_importlib.data' + def _fixture_on_path(self, filename): + pkg_file = resources.files(self.root).joinpath(filename) + file = self.resources.enter_context(resources.as_file(pkg_file)) + assert file.name.startswith('example-'), file.name + sys.path.insert(0, str(file)) + self.resources.callback(sys.path.pop, 0) + def setUp(self): # Find the path to the example-*.whl so we can add it to the front of # sys.path, where we'll then try to find the metadata thereof. self.resources = ExitStack() self.addCleanup(self.resources.close) - wheel = self.resources.enter_context( - path(self.root, 'example-21.12-py3-none-any.whl')) - sys.path.insert(0, str(wheel)) - self.resources.callback(sys.path.pop, 0) + self._fixture_on_path('example-21.12-py3-none-any.whl') def test_zip_version(self): self.assertEqual(version('example'), '21.12') @@ -49,6 +54,10 @@ def test_files(self): path = str(file.dist.locate_file(file)) assert '.whl/' in path, path + def test_one_distribution(self): + dists = list(distributions(path=sys.path[:1])) + assert len(dists) == 1 + @requires_zlib() class TestEgg(TestZip): @@ -57,10 +66,7 @@ def setUp(self): # sys.path, where we'll then try to find the metadata thereof. 
self.resources = ExitStack() self.addCleanup(self.resources.close) - egg = self.resources.enter_context( - path(self.root, 'example-21.12-py3.6.egg')) - sys.path.insert(0, str(egg)) - self.resources.callback(sys.path.pop, 0) + self._fixture_on_path('example-21.12-py3.6.egg') def test_files(self): for file in files('example'): diff --git a/Misc/NEWS.d/next/Library/2020-06-05-19-29-10.bpo-39791._CcO3d.rst b/Misc/NEWS.d/next/Library/2020-06-05-19-29-10.bpo-39791._CcO3d.rst new file mode 100644 index 0000000000000..73e0cbb013f84 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-05-19-29-10.bpo-39791._CcO3d.rst @@ -0,0 +1 @@ +Refresh importlib.metadata from importlib_metadata 1.6.1. \ No newline at end of file From webhook-mailer at python.org Fri Jun 5 16:56:14 2020 From: webhook-mailer at python.org (Ram Rachum) Date: Fri, 05 Jun 2020 20:56:14 -0000 Subject: [Python-checkins] bpo-40876: Clarify error message in the csv module (GH-20653) Message-ID: https://github.com/python/cpython/commit/235f918f44bb89e27190db2f1823d191dbd4ad28 commit: 235f918f44bb89e27190db2f1823d191dbd4ad28 branch: master author: Ram Rachum committer: GitHub date: 2020-06-05T17:56:06-03:00 summary: bpo-40876: Clarify error message in the csv module (GH-20653) files: A Misc/NEWS.d/next/Library/2020-06-05-20-00-18.bpo-40876.zDhiZj.rst M Modules/_csv.c diff --git a/Misc/NEWS.d/next/Library/2020-06-05-20-00-18.bpo-40876.zDhiZj.rst b/Misc/NEWS.d/next/Library/2020-06-05-20-00-18.bpo-40876.zDhiZj.rst new file mode 100644 index 0000000000000..75f62addbabbc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-05-20-00-18.bpo-40876.zDhiZj.rst @@ -0,0 +1 @@ +Clarify error message in the :mod:`csv` module. diff --git a/Modules/_csv.c b/Modules/_csv.c index f33733aaf850d..7e44419c0876b 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -810,7 +810,7 @@ Reader_iternext(ReaderObj *self) PyErr_Format(_csvstate_global->error_obj, "iterator should return strings, " "not %.200s " - "(did you open the file in text mode?)", + "(the file should be opened in text mode)", Py_TYPE(lineobj)->tp_name ); Py_DECREF(lineobj); From webhook-mailer at python.org Fri Jun 5 17:32:18 2020 From: webhook-mailer at python.org (Erlend Egeberg Aasland) Date: Fri, 05 Jun 2020 21:32:18 -0000 Subject: [Python-checkins] bpo-40867: Remove unused include from Module/_randommodule.c (GH-20635) Message-ID: https://github.com/python/cpython/commit/45af786e111aed5f687e1f0d8b45b6a5e678a6bc commit: 45af786e111aed5f687e1f0d8b45b6a5e678a6bc branch: master author: Erlend Egeberg Aasland committer: GitHub date: 2020-06-05T14:32:09-07:00 summary: bpo-40867: Remove unused include from Module/_randommodule.c (GH-20635) files: M Modules/_randommodule.c diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c index 3589173edcb62..3e3139e4990cc 100644 --- a/Modules/_randommodule.c +++ b/Modules/_randommodule.c @@ -67,7 +67,6 @@ /* ---------------------------------------------------------------*/ #include "Python.h" -#include "pycore_byteswap.h" // _Py_bswap32() #ifdef HAVE_PROCESS_H # include // getpid() #endif From webhook-mailer at python.org Fri Jun 5 19:52:20 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 05 Jun 2020 23:52:20 -0000 Subject: [Python-checkins] bpo-40883: Fix memory leak in fstring_compile_expr in parse_string.c (GH-20667) Message-ID: https://github.com/python/cpython/commit/a54096e30523534e8eebb8dc1011b4536ed237a8 commit: a54096e30523534e8eebb8dc1011b4536ed237a8 branch: master author: Pablo Galindo committer: GitHub date: 
2020-06-06T00:52:15+01:00 summary: bpo-40883: Fix memory leak in fstring_compile_expr in parse_string.c (GH-20667) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-05-23-25-00.bpo-40883.M6sQ-Q.rst M Parser/pegen/parse_string.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-05-23-25-00.bpo-40883.M6sQ-Q.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-23-25-00.bpo-40883.M6sQ-Q.rst new file mode 100644 index 0000000000000..ebeb0cc60d16b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-23-25-00.bpo-40883.M6sQ-Q.rst @@ -0,0 +1 @@ +Fix memory leak in when parsing f-strings in the new parser. Patch by Pablo Galindo \ No newline at end of file diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index e24ecc58d3aa1..efe82df47658b 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -604,6 +604,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, struct tok_state* tok = PyTokenizer_FromString(str, 1); if (tok == NULL) { + PyMem_RawFree(str); return NULL; } Py_INCREF(p->tok->filename); @@ -629,6 +630,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, result = expr; exit: + PyMem_RawFree(str); _PyPegen_Parser_Free(p2); PyTokenizer_Free(tok); return result; From webhook-mailer at python.org Fri Jun 5 19:52:32 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 05 Jun 2020 23:52:32 -0000 Subject: [Python-checkins] bpo-40880: Fix invalid read in newline_in_string in pegen.c (#20666) Message-ID: https://github.com/python/cpython/commit/2e6593db0086004a1ca7f7049218ff9573d473c2 commit: 2e6593db0086004a1ca7f7049218ff9573d473c2 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-06T00:52:27+01:00 summary: bpo-40880: Fix invalid read in newline_in_string in pegen.c (#20666) * bpo-40880: Fix invalid read in newline_in_string in pegen.c * Update Parser/pegen/pegen.c Co-authored-by: Lysandros Nikolaou * Add NEWS entry Co-authored-by: Lysandros Nikolaou files: A Misc/NEWS.d/next/Core and Builtins/2020-06-06-00-23-19.bpo-40880.fjdzSh.rst M Parser/pegen/pegen.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-06-00-23-19.bpo-40880.fjdzSh.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-06-00-23-19.bpo-40880.fjdzSh.rst new file mode 100644 index 0000000000000..ab42f5c205f81 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-06-00-23-19.bpo-40880.fjdzSh.rst @@ -0,0 +1,2 @@ +Fix invalid memory read in the new parser when checking newlines in string +literals. Patch by Pablo Galindo. 
diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index c55ff7e45c0da..afe75d7f862ee 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -937,8 +937,8 @@ _PyPegen_number_token(Parser *p) static int // bool newline_in_string(Parser *p, const char *cur) { - for (char c = *cur; cur >= p->tok->buf; c = *--cur) { - if (c == '\'' || c == '"') { + for (const char *c = cur; c >= p->tok->buf; c--) { + if (*c == '\'' || *c == '"') { return 1; } } From webhook-mailer at python.org Sat Jun 6 00:21:48 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 06 Jun 2020 04:21:48 -0000 Subject: [Python-checkins] Refactor scripts in Tools/peg_generator/scripts (GH-20401) Message-ID: https://github.com/python/cpython/commit/ba6fd87e41dceb01dcdacc57c722aca12cde42a9 commit: ba6fd87e41dceb01dcdacc57c722aca12cde42a9 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-05T21:21:40-07:00 summary: Refactor scripts in Tools/peg_generator/scripts (GH-20401) files: M Modules/_peg_parser.c M Tools/peg_generator/Makefile M Tools/peg_generator/scripts/benchmark.py M Tools/peg_generator/scripts/grammar_grapher.py M Tools/peg_generator/scripts/show_parse.py M Tools/peg_generator/scripts/test_parse_directory.py M Tools/peg_generator/scripts/test_pypi_packages.py diff --git a/Modules/_peg_parser.c b/Modules/_peg_parser.c index b66d5a83a84f6..ca2a3cf7b5fd8 100644 --- a/Modules/_peg_parser.c +++ b/Modules/_peg_parser.c @@ -80,14 +80,15 @@ _Py_compile_string(PyObject *self, PyObject *args, PyObject *kwds) PyObject * _Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds) { - static char *keywords[] = {"string", "filename", "mode", "oldparser", NULL}; + static char *keywords[] = {"string", "filename", "mode", "oldparser", "ast", NULL}; char *the_string; char *filename = ""; char *mode_str = "exec"; int oldparser = 0; + int ast = 1; - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ssp", keywords, - &the_string, &filename, &mode_str, &oldparser)) { + if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|sspp", keywords, + &the_string, &filename, &mode_str, &oldparser, &ast)) { return NULL; } @@ -110,7 +111,14 @@ _Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds) return NULL; } - PyObject *result = PyAST_mod2obj(mod); + PyObject *result; + if (ast) { + result = PyAST_mod2obj(mod); + } + else { + Py_INCREF(Py_None); + result = Py_None; + } PyArena_Free(arena); return result; } diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile index e7a190c1bcd13..fb727c048b311 100644 --- a/Tools/peg_generator/Makefile +++ b/Tools/peg_generator/Makefile @@ -70,23 +70,21 @@ stats: peg_extension/parse.c data/xxl.py time: time_compile time_compile: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl compile + $(VENVPYTHON) scripts/benchmark.py --parser=new --target=xxl compile time_parse: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=pegen --target=xxl parse + $(VENVPYTHON) scripts/benchmark.py --parser=new --target=xxl parse time_old: time_old_compile time_old_compile: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl compile + $(VENVPYTHON) scripts/benchmark.py --parser=old --target=xxl compile time_old_parse: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=cpython --target=xxl parse + $(VENVPYTHON) scripts/benchmark.py --parser=old --target=xxl parse time_peg_dir: venv $(VENVPYTHON) scripts/test_parse_directory.py \ - --grammar-file $(GRAMMAR) \ - 
--tokens-file $(TOKENS) \ -d $(TESTDIR) \ $(TESTFLAGS) \ --exclude "*/failset/*" \ @@ -95,12 +93,8 @@ time_peg_dir: venv time_stdlib: $(CPYTHON) venv $(VENVPYTHON) scripts/test_parse_directory.py \ - --grammar-file $(GRAMMAR) \ - --tokens-file $(TOKENS) \ -d $(CPYTHON) \ $(TESTFLAGS) \ - --exclude "*/test2to3/*" \ - --exclude "*/test2to3/**/*" \ --exclude "*/bad*" \ --exclude "*/lib2to3/tests/data/*" diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index 71512c22a355b..af356bed78391 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -24,7 +24,7 @@ argparser.add_argument( "--parser", action="store", - choices=["pegen", "cpython"], + choices=["new", "old"], default="pegen", help="Which parser to benchmark (default is pegen)", ) @@ -40,7 +40,12 @@ command_compile = subcommands.add_parser( "compile", help="Benchmark parsing and compiling to bytecode" ) -command_parse = subcommands.add_parser("parse", help="Benchmark parsing and generating an ast.AST") +command_parse = subcommands.add_parser( + "parse", help="Benchmark parsing and generating an ast.AST" +) +command_notree = subcommands.add_parser( + "notree", help="Benchmark parsing and dumping the tree" +) def benchmark(func): @@ -62,7 +67,7 @@ def wrapper(*args): @benchmark def time_compile(source, parser): - if parser == "cpython": + if parser == "old": return _peg_parser.compile_string( source, oldparser=True, @@ -73,32 +78,40 @@ def time_compile(source, parser): @benchmark def time_parse(source, parser): - if parser == "cpython": + if parser == "old": return _peg_parser.parse_string(source, oldparser=True) else: return _peg_parser.parse_string(source) + at benchmark +def time_notree(source, parser): + if parser == "old": + return _peg_parser.parse_string(source, oldparser=True, ast=False) + else: + return _peg_parser.parse_string(source, ast=False) + + def run_benchmark_xxl(subcommand, parser, source): if subcommand == "compile": time_compile(source, parser) elif subcommand == "parse": time_parse(source, parser) + elif subcommand == "notree": + time_notree(source, parser) def run_benchmark_stdlib(subcommand, parser): + modes = {"compile": 2, "parse": 1, "notree": 0} for _ in range(3): parse_directory( "../../Lib", - "../../Grammar/python.gram", - "../../Grammar/Tokens", verbose=False, excluded_files=["*/bad*", "*/lib2to3/tests/data/*",], - skip_actions=False, tree_arg=0, short=True, - mode=2 if subcommand == "compile" else 1, - parser=parser, + mode=modes[subcommand], + oldparser=(parser == "old"), ) diff --git a/Tools/peg_generator/scripts/grammar_grapher.py b/Tools/peg_generator/scripts/grammar_grapher.py index 3aa25466c70d4..4afdbce8f966f 100755 --- a/Tools/peg_generator/scripts/grammar_grapher.py +++ b/Tools/peg_generator/scripts/grammar_grapher.py @@ -42,6 +42,13 @@ ) argparser = argparse.ArgumentParser(prog="graph_grammar", description="Graph a grammar tree",) +argparser.add_argument( + "-s", + "--start", + choices=["exec", "eval", "single"], + default="exec", + help="Choose the grammar's start rule (exec, eval or single)", +) argparser.add_argument("grammar_file", help="The grammar file to graph") @@ -91,19 +98,15 @@ def main() -> None: references[name] = set(references_for_item(rule)) # Flatten the start node if has only a single reference - root_node = "start" - if start := references["start"]: - if len(start) == 1: - root_node = list(start)[0] - del references["start"] + root_node = {"exec": "file", "eval": "eval", "single": 
"interactive"}[args.start] print("digraph g1 {") print('\toverlap="scale";') # Force twopi to scale the graph to avoid overlaps print(f'\troot="{root_node}";') - print(f"\t{root_node} [color=green, shape=circle]") + print(f"\t{root_node} [color=green, shape=circle];") for name, refs in references.items(): - if refs: # Ignore empty sets - print(f"\t{name} -> {','.join(refs)};") + for ref in refs: + print(f"\t{name} -> {ref};") print("}") diff --git a/Tools/peg_generator/scripts/show_parse.py b/Tools/peg_generator/scripts/show_parse.py index 1c1996f40f74e..b4ee5a1b357f7 100755 --- a/Tools/peg_generator/scripts/show_parse.py +++ b/Tools/peg_generator/scripts/show_parse.py @@ -41,7 +41,13 @@ parser.add_argument( "-d", "--diff", action="store_true", help="show diff between grammar and ast (requires -g)" ) -parser.add_argument("-g", "--grammar-file", help="grammar to use (default: use the ast module)") +parser.add_argument( + "-p", + "--parser", + choices=["new", "old"], + default="new", + help="choose the parser to use" +) parser.add_argument( "-m", "--multiline", @@ -84,19 +90,18 @@ def print_parse(source: str, verbose: bool = False) -> None: def main() -> None: args = parser.parse_args() - if args.diff and not args.grammar_file: - parser.error("-d/--diff requires -g/--grammar-file") + new_parser = args.parser == "new" if args.multiline: sep = "\n" else: sep = " " program = sep.join(args.program) - if args.grammar_file: + if new_parser: tree = _peg_parser.parse_string(program) if args.diff: - a = tree - b = _peg_parser.parse_string(program, oldparser=True) + a = _peg_parser.parse_string(program, oldparser=True) + b = tree diff = diff_trees(a, b, args.verbose) if diff: for line in diff: @@ -104,11 +109,11 @@ def main() -> None: else: print("# Trees are the same") else: - print(f"# Parsed using {args.grammar_file}") + print("# Parsed using the new parser") print(format_tree(tree, args.verbose)) else: tree = _peg_parser.parse_string(program, oldparser=True) - print("# Parse using the old parser") + print("# Parsed using the old parser") print(format_tree(tree, args.verbose)) diff --git a/Tools/peg_generator/scripts/test_parse_directory.py b/Tools/peg_generator/scripts/test_parse_directory.py index e88afe1539ce1..63204ce9dc193 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -11,7 +11,7 @@ from glob import glob from pathlib import PurePath -from typing import List, Optional, Any +from typing import List, Optional, Any, Tuple sys.path.insert(0, os.getcwd()) from pegen.ast_dump import ast_dump @@ -22,13 +22,15 @@ FAIL = "\033[91m" ENDC = "\033[0m" +COMPILE = 2 +PARSE = 1 +NOTREE = 0 + argparser = argparse.ArgumentParser( prog="test_parse_directory", description="Helper program to test directories or files for pegen", ) argparser.add_argument("-d", "--directory", help="Directory path containing files to test") -argparser.add_argument("--grammar-file", help="Grammar file path") -argparser.add_argument("--tokens-file", help="Tokens file path") argparser.add_argument( "-e", "--exclude", action="append", default=[], help="Glob(s) for matching files to exclude" ) @@ -38,9 +40,6 @@ argparser.add_argument( "-v", "--verbose", action="store_true", help="Display detailed errors for failures" ) -argparser.add_argument( - "--skip-actions", action="store_true", help="Suppress code emission for rule actions", -) argparser.add_argument( "-t", "--tree", action="count", help="Compare parse tree to official AST", default=0 ) @@ -113,92 
+112,35 @@ def compare_trees( return 1 -def parse_directory( - directory: str, - grammar_file: str, - tokens_file: str, - verbose: bool, - excluded_files: List[str], - skip_actions: bool, - tree_arg: int, - short: bool, - mode: int, - parser: str, -) -> int: - if parser == "cpython" and (tree_arg or mode == 0): - print("Cannot specify tree argument or mode=0 with the cpython parser.", file=sys.stderr) - return 1 - - if not directory: - print("You must specify a directory of files to test.", file=sys.stderr) - return 1 - - if grammar_file and tokens_file: - if not os.path.exists(grammar_file): - print(f"The specified grammar file, {grammar_file}, does not exist.", file=sys.stderr) - return 1 +def parse_file(source: str, file: str, mode: int, oldparser: bool) -> Tuple[Any, float]: + t0 = time.time() + if mode == COMPILE: + result = _peg_parser.compile_string( + source, + filename=file, + oldparser=oldparser, + ) else: - print( - "A grammar file or a tokens file was not provided - attempting to use existing parser from stdlib...\n" + result = _peg_parser.parse_string( + source, + filename=file, + oldparser=oldparser, + ast=(mode == PARSE), ) + t1 = time.time() + return result, t1 - t0 - if tree_arg: - assert mode == 1, "Mode should be 1 (parse), when comparing the generated trees" - # For a given directory, traverse files and attempt to parse each one - # - Output success/failure for each file - errors = 0 - files = [] - trees = {} # Trees to compare (after everything else is done) - total_seconds = 0 +def is_parsing_failure(source: str) -> bool: + try: + _peg_parser.parse_string(source, mode="exec", oldparser=True) + except SyntaxError: + return False + return True - for file in sorted(glob(f"{directory}/**/*.py", recursive=True)): - # Only attempt to parse Python files and files that are not excluded - should_exclude_file = False - for pattern in excluded_files: - if PurePath(file).match(pattern): - should_exclude_file = True - break - - if not should_exclude_file: - with tokenize.open(file) as f: - source = f.read() - try: - t0 = time.time() - if mode == 2: - result = _peg_parser.compile_string( - source, - filename=file, - oldparser=parser == "cpython", - ) - else: - result = _peg_parser.parse_string( - source, - filename=file, - oldparser=parser == "cpython" - ) - t1 = time.time() - total_seconds += (t1 - t0) - if tree_arg: - trees[file] = result - if not short: - report_status(succeeded=True, file=file, verbose=verbose) - except Exception as error: - try: - _peg_parser.parse_string(source, mode="exec", oldparser=True) - except Exception: - if not short: - print(f"File {file} cannot be parsed by either pegen or the ast module.") - else: - report_status( - succeeded=False, file=file, verbose=verbose, error=error, short=short - ) - errors += 1 - files.append(file) - t1 = time.time() +def generate_time_stats(files, total_seconds) -> None: total_files = len(files) - total_bytes = 0 total_lines = 0 for file in files: @@ -217,6 +159,57 @@ def parse_directory( f"or {total_bytes / total_seconds :,.0f} bytes/sec.", ) + +def parse_directory( + directory: str, + verbose: bool, + excluded_files: List[str], + tree_arg: int, + short: bool, + mode: int, + oldparser: bool, +) -> int: + if tree_arg: + assert mode == PARSE, "Mode should be 1 (parse), when comparing the generated trees" + + if oldparser and tree_arg: + print("Cannot specify tree argument with the cpython parser.", file=sys.stderr) + return 1 + + # For a given directory, traverse files and attempt to parse each one + # - Output 
success/failure for each file + errors = 0 + files = [] + trees = {} # Trees to compare (after everything else is done) + total_seconds = 0 + + for file in sorted(glob(f"{directory}/**/*.py", recursive=True)): + # Only attempt to parse Python files and files that are not excluded + if any(PurePath(file).match(pattern) for pattern in excluded_files): + continue + + with tokenize.open(file) as f: + source = f.read() + + try: + result, dt = parse_file(source, file, mode, oldparser) + total_seconds += dt + if tree_arg: + trees[file] = result + report_status(succeeded=True, file=file, verbose=verbose, short=short) + except SyntaxError as error: + if is_parsing_failure(source): + print(f"File {file} cannot be parsed by either parser.") + else: + report_status( + succeeded=False, file=file, verbose=verbose, error=error, short=short + ) + errors += 1 + files.append(file) + + t1 = time.time() + + generate_time_stats(files, total_seconds) if short: print_memstats() @@ -240,26 +233,20 @@ def parse_directory( def main() -> None: args = argparser.parse_args() directory = args.directory - grammar_file = args.grammar_file - tokens_file = args.tokens_file verbose = args.verbose excluded_files = args.exclude - skip_actions = args.skip_actions tree = args.tree short = args.short mode = 1 if args.tree else 2 sys.exit( parse_directory( directory, - grammar_file, - tokens_file, verbose, excluded_files, - skip_actions, tree, short, mode, - "pegen", + oldparser=False, ) ) diff --git a/Tools/peg_generator/scripts/test_pypi_packages.py b/Tools/peg_generator/scripts/test_pypi_packages.py index 98f77785cdd1c..f014753b3cd23 100755 --- a/Tools/peg_generator/scripts/test_pypi_packages.py +++ b/Tools/peg_generator/scripts/test_pypi_packages.py @@ -57,22 +57,11 @@ def find_dirname(package_name: str) -> str: def run_tests(dirname: str, tree: int) -> int: return test_parse_directory.parse_directory( dirname, - HERE / ".." / ".." / ".." / "Grammar" / "python.gram", - HERE / ".." / ".." / ".." 
/ "Grammar" / "Tokens", verbose=False, - excluded_files=[ - "*/failset/*", - "*/failset/**", - "*/failset/**/*", - "*/test2to3/*", - "*/test2to3/**/*", - "*/bad*", - "*/lib2to3/tests/data/*", - ], - skip_actions=False, + excluded_files=[], tree_arg=tree, short=True, - mode=1, + mode=1 if tree else 0, parser="pegen", ) From webhook-mailer at python.org Sat Jun 6 04:24:50 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 06 Jun 2020 08:24:50 -0000 Subject: [Python-checkins] bpo-40807: Show warnings once from codeop._maybe_compile (GH-20486) Message-ID: https://github.com/python/cpython/commit/c067183605cf84bb1a246635f52827251d0476f8 commit: c067183605cf84bb1a246635f52827251d0476f8 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-06T01:24:45-07:00 summary: bpo-40807: Show warnings once from codeop._maybe_compile (GH-20486) * bpo-40807: Show warnings once from codeop._maybe_compile * Move catch_warnings * news Co-authored-by: Terry Jan Reedy (cherry picked from commit 052d3fc0907be253cfd64b2c737a0b0aca586011) Co-authored-by: Cheryl Sabella files: A Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst M Lib/codeop.py M Lib/test/test_codeop.py diff --git a/Lib/codeop.py b/Lib/codeop.py index 3c37f35eb0250..3c2bb6083561e 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -57,6 +57,7 @@ """ import __future__ +import warnings _features = [getattr(__future__, fname) for fname in __future__.all_feature_names] @@ -83,15 +84,18 @@ def _maybe_compile(compiler, source, filename, symbol): except SyntaxError as err: pass - try: - code1 = compiler(source + "\n", filename, symbol) - except SyntaxError as e: - err1 = e - - try: - code2 = compiler(source + "\n\n", filename, symbol) - except SyntaxError as e: - err2 = e + # Suppress warnings after the first compile to avoid duplication. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + code1 = compiler(source + "\n", filename, symbol) + except SyntaxError as e: + err1 = e + + try: + code2 = compiler(source + "\n\n", filename, symbol) + except SyntaxError as e: + err2 = e try: if code: diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 4d52d15fa0fb3..1e57ab9d51e2c 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -294,6 +294,11 @@ def test_filename(self): self.assertNotEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "def", 'single').co_filename) + def test_warning(self): + # Test that the warning is only returned once. + with support.check_warnings((".*literal", SyntaxWarning)) as w: + compile_command("0 is 0") + self.assertEqual(len(w.warnings), 1) if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst new file mode 100644 index 0000000000000..532b809b77eed --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst @@ -0,0 +1,2 @@ +Stop codeop._maybe_compile, used by code.InteractiveInterpreter (and IDLE). +from from emitting each warning three times. 
From webhook-mailer at python.org Sat Jun 6 06:09:06 2020 From: webhook-mailer at python.org (Shantanu) Date: Sat, 06 Jun 2020 10:09:06 -0000 Subject: [Python-checkins] bpo-40614: Respect feature version for f-string debug expressions (GH-20196) (GH-20466) Message-ID: https://github.com/python/cpython/commit/f7ed4d4e83f5d9e85e244a1cbc460f26436ab24d commit: f7ed4d4e83f5d9e85e244a1cbc460f26436ab24d branch: 3.8 author: Shantanu committer: GitHub date: 2020-06-06T11:08:48+01:00 summary: bpo-40614: Respect feature version for f-string debug expressions (GH-20196) (GH-20466) Co-authored-by: Lysandros Nikolaou Co-authored-by: Pablo Galindo (cherry picked from commit c116c94ff119485761460f1033cdee425bed0310) files: A Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst M Lib/test/test_ast.py M Python/ast.c diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 486f2aa707e83..869346664499c 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -630,6 +630,12 @@ def test_issue39579_dotted_name_end_col_offset(self): attr_b = tree.body[0].decorator_list[0].value self.assertEqual(attr_b.end_col_offset, 4) + def test_issue40614_feature_version(self): + ast.parse('f"{x=}"', feature_version=(3, 8)) + with self.assertRaises(SyntaxError): + ast.parse('f"{x=}"', feature_version=(3, 7)) + + class ASTHelpers_Test(unittest.TestCase): maxDiff = None diff --git a/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst b/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst new file mode 100644 index 0000000000000..238b98c14a326 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst @@ -0,0 +1 @@ +:func:`ast.parse` will not parse self documenting expressions in f-strings when passed ``feature_version`` is less than ``(3, 8)``. diff --git a/Python/ast.c b/Python/ast.c index f70d48ba3a15d..594879bd0ef0b 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -5202,6 +5202,12 @@ fstring_find_expr(const char **str, const char *end, int raw, int recurse_lvl, /* Check for =, which puts the text value of the expression in expr_text. */ if (**str == '=') { + if (c->c_feature_version < 8) { + ast_error(c, n, + "f-string: self documenting expressions are " + "only supported in Python 3.8 and greater"); + goto error; + } *str += 1; /* Skip over ASCII whitespace. 
No need to test for end of string From webhook-mailer at python.org Sat Jun 6 07:21:59 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Sat, 06 Jun 2020 11:21:59 -0000 Subject: [Python-checkins] [workflow] Use gcc problem matcher for Ubuntu action build (GH-18567) Message-ID: https://github.com/python/cpython/commit/5552850f8e6ad6bf610c2633c74ed42dacc81b46 commit: 5552850f8e6ad6bf610c2633c74ed42dacc81b46 branch: master author: Ammar Askar committer: GitHub date: 2020-06-06T12:21:46+01:00 summary: [workflow] Use gcc problem matcher for Ubuntu action build (GH-18567) files: A .github/problem-matchers/gcc.json M .github/workflows/build.yml diff --git a/.github/problem-matchers/gcc.json b/.github/problem-matchers/gcc.json new file mode 100644 index 0000000000000..bd5ab6c00a760 --- /dev/null +++ b/.github/problem-matchers/gcc.json @@ -0,0 +1,18 @@ +{ + "__comment": "Taken from vscode-cpptools's Extension/package.json gcc rule", + "problemMatcher": [ + { + "owner": "gcc-problem-matcher", + "pattern": [ + { + "regexp": "^(.*):(\\d+):(\\d+):\\s+(?:fatal\\s+)?(warning|error):\\s+(.*)$", + "file": 1, + "line": 2, + "column": 3, + "severity": 4, + "message": 5 + } + ] + } + ] +} \ No newline at end of file diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6bb52cb6a5daa..5649a6670e75f 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -87,6 +87,8 @@ jobs: OPENSSL_VER: 1.1.1f steps: - uses: actions/checkout at v2 + - name: Register gcc problem matcher + run: echo "::add-matcher::.github/problem-matchers/gcc.json" - name: Install Dependencies run: sudo ./.github/workflows/posix-deps-apt.sh - name: 'Restore OpenSSL build' From webhook-mailer at python.org Sat Jun 6 08:44:20 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 06 Jun 2020 12:44:20 -0000 Subject: [Python-checkins] bpo-40870: Invalidate usage of some constants with ast.Name (GH-20649) Message-ID: https://github.com/python/cpython/commit/68874a8502da440a1dc4746cf73262648b870aee commit: 68874a8502da440a1dc4746cf73262648b870aee branch: master author: Batuhan Taskaya committer: GitHub date: 2020-06-06T05:44:16-07:00 summary: bpo-40870: Invalidate usage of some constants with ast.Name (GH-20649) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst M Lib/test/test_ast.py M Python/ast.c diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index a3b366ec35da1..78e4a5653d4ef 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -668,6 +668,13 @@ def test_issue40614_feature_version(self): with self.assertRaises(SyntaxError): ast.parse('f"{x=}"', feature_version=(3, 7)) + def test_constant_as_name(self): + for constant in "True", "False", "None": + expr = ast.Expression(ast.Name(constant, ast.Load())) + ast.fix_missing_locations(expr) + with self.assertRaisesRegex(ValueError, f"Name node can't be used with '{constant}' constant"): + compile(expr, "", "eval") + class ASTHelpers_Test(unittest.TestCase): maxDiff = None diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst new file mode 100644 index 0000000000000..8e943a29f337f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst @@ -0,0 +1,2 @@ +Raise :exc:`ValueError` when validating custom AST's where the constants +``True``, ``False`` and ``None`` are used within a :class:`ast.Name` node. 
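
The Python/ast.c hunk below adds the validation; from Python the effect is what the new test_constant_as_name case checks, namely that a hand-built Name node whose id is a constant keyword is rejected at compile time. A minimal reproduction:

    import ast

    expr = ast.Expression(ast.Name("True", ast.Load()))
    ast.fix_missing_locations(expr)
    try:
        compile(expr, "<example>", "eval")
    except ValueError as exc:
        print(exc)   # Name node can't be used with 'True' constant
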
diff --git a/Python/ast.c b/Python/ast.c index c524b8e34e873..408591f32536f 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -21,6 +21,25 @@ static int validate_nonempty_seq(asdl_seq *, const char *, const char *); static int validate_stmt(stmt_ty); static int validate_expr(expr_ty, expr_context_ty); +static int +validate_name(PyObject *name) +{ + assert(PyUnicode_Check(name)); + static const char * const forbidden[] = { + "None", + "True", + "False", + NULL + }; + for (int i = 0; forbidden[i] != NULL; i++) { + if (_PyUnicode_EqualToASCIIString(name, forbidden[i])) { + PyErr_Format(PyExc_ValueError, "Name node can't be used with '%s' constant", forbidden[i]); + return 0; + } + } + return 1; +} + static int validate_comprehension(asdl_seq *gens) { @@ -173,6 +192,9 @@ validate_expr(expr_ty exp, expr_context_ty ctx) actual_ctx = exp->v.Starred.ctx; break; case Name_kind: + if (!validate_name(exp->v.Name.id)) { + return 0; + } actual_ctx = exp->v.Name.ctx; break; case List_kind: From webhook-mailer at python.org Sat Jun 6 13:04:51 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 06 Jun 2020 17:04:51 -0000 Subject: [Python-checkins] bpo-40870: Invalidate usage of some constants with ast.Name (GH-20649) Message-ID: https://github.com/python/cpython/commit/83a9ba442662c2a030b45955f3dd24ff4b24bb61 commit: 83a9ba442662c2a030b45955f3dd24ff4b24bb61 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-06T10:04:47-07:00 summary: bpo-40870: Invalidate usage of some constants with ast.Name (GH-20649) (cherry picked from commit 68874a8502da440a1dc4746cf73262648b870aee) Co-authored-by: Batuhan Taskaya files: A Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst M Lib/test/test_ast.py M Python/ast.c diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py index 869346664499c..b921f4a5d6826 100644 --- a/Lib/test/test_ast.py +++ b/Lib/test/test_ast.py @@ -635,6 +635,13 @@ def test_issue40614_feature_version(self): with self.assertRaises(SyntaxError): ast.parse('f"{x=}"', feature_version=(3, 7)) + def test_constant_as_name(self): + for constant in "True", "False", "None": + expr = ast.Expression(ast.Name(constant, ast.Load())) + ast.fix_missing_locations(expr) + with self.assertRaisesRegex(ValueError, f"Name node can't be used with '{constant}' constant"): + compile(expr, "", "eval") + class ASTHelpers_Test(unittest.TestCase): maxDiff = None diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst new file mode 100644 index 0000000000000..8e943a29f337f --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst @@ -0,0 +1,2 @@ +Raise :exc:`ValueError` when validating custom AST's where the constants +``True``, ``False`` and ``None`` are used within a :class:`ast.Name` node. 
diff --git a/Python/ast.c b/Python/ast.c index 594879bd0ef0b..0a999fcca43a8 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -21,6 +21,25 @@ static int validate_nonempty_seq(asdl_seq *, const char *, const char *); static int validate_stmt(stmt_ty); static int validate_expr(expr_ty, expr_context_ty); +static int +validate_name(PyObject *name) +{ + assert(PyUnicode_Check(name)); + static const char * const forbidden[] = { + "None", + "True", + "False", + NULL + }; + for (int i = 0; forbidden[i] != NULL; i++) { + if (_PyUnicode_EqualToASCIIString(name, forbidden[i])) { + PyErr_Format(PyExc_ValueError, "Name node can't be used with '%s' constant", forbidden[i]); + return 0; + } + } + return 1; +} + static int validate_comprehension(asdl_seq *gens) { @@ -199,6 +218,9 @@ validate_expr(expr_ty exp, expr_context_ty ctx) actual_ctx = exp->v.Starred.ctx; break; case Name_kind: + if (!validate_name(exp->v.Name.id)) { + return 0; + } actual_ctx = exp->v.Name.ctx; break; case List_kind: From webhook-mailer at python.org Sat Jun 6 15:35:18 2020 From: webhook-mailer at python.org (scoder) Date: Sat, 06 Jun 2020 19:35:18 -0000 Subject: [Python-checkins] bpo-40724: Support setting buffer slots from type specs (GH-20648) Message-ID: https://github.com/python/cpython/commit/f7c4e236429606e1c982cacf24e10fc86ef4462f commit: f7c4e236429606e1c982cacf24e10fc86ef4462f branch: master author: scoder committer: GitHub date: 2020-06-06T21:35:10+02:00 summary: bpo-40724: Support setting buffer slots from type specs (GH-20648) This is not part of the limited API but makes the buffer slots available for type specs. files: A Misc/NEWS.d/next/C API/2020-06-04-08-01-23.bpo-40724.qIIdSi.rst M Include/typeslots.h M Lib/test/test_capi.py M Modules/_testcapimodule.c M Objects/typeslots.inc diff --git a/Include/typeslots.h b/Include/typeslots.h index 0ce6a377dcfbd..64f6fff514449 100644 --- a/Include/typeslots.h +++ b/Include/typeslots.h @@ -1,7 +1,12 @@ /* Do not renumber the file; these numbers are part of the stable ABI. */ +#if defined(Py_LIMITED_API) /* Disabled, see #10181 */ #undef Py_bf_getbuffer #undef Py_bf_releasebuffer +#else +#define Py_bf_getbuffer 1 +#define Py_bf_releasebuffer 2 +#endif #define Py_mp_ass_subscript 3 #define Py_mp_length 4 #define Py_mp_subscript 5 diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 5b8b9f6a86f4b..73e167a0b05a5 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -477,6 +477,11 @@ def test_heaptype_with_weakref(self): self.assertEqual(ref(), inst) self.assertEqual(inst.weakreflist, ref) + def test_heaptype_with_buffer(self): + inst = _testcapi.HeapCTypeWithBuffer() + b = bytes(inst) + self.assertEqual(b, b"1234") + def test_c_subclass_of_heap_ctype_with_tpdealloc_decrefs_once(self): subclass_instance = _testcapi.HeapCTypeSubclass() type_refcnt = sys.getrefcount(_testcapi.HeapCTypeSubclass) diff --git a/Misc/NEWS.d/next/C API/2020-06-04-08-01-23.bpo-40724.qIIdSi.rst b/Misc/NEWS.d/next/C API/2020-06-04-08-01-23.bpo-40724.qIIdSi.rst new file mode 100644 index 0000000000000..82793dbf7ad5f --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-04-08-01-23.bpo-40724.qIIdSi.rst @@ -0,0 +1 @@ +Allow defining buffer slots in type specs. 
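
The _testcapimodule.c hunk below defines HeapCTypeWithBuffer through PyType_FromSpec using the newly exposed Py_bf_getbuffer/Py_bf_releasebuffer slots; from Python such a heap type is an ordinary buffer exporter. Roughly what the new test exercises (using CPython's test-only _testcapi module):

    from _testcapi import HeapCTypeWithBuffer

    inst = HeapCTypeWithBuffer()
    print(bytes(inst))                  # b'1234', as the test asserts
    print(memoryview(inst).tobytes())   # should likewise give b'1234'
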
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 101d54932d913..d6a90b807d026 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -6298,6 +6298,47 @@ static PyType_Spec HeapCTypeSubclass_spec = { HeapCTypeSubclass_slots }; +PyDoc_STRVAR(heapctypewithbuffer__doc__, +"Heap type with buffer support.\n\n" +"The buffer is set to [b'1', b'2', b'3', b'4']"); + +typedef struct { + HeapCTypeObject base; + char buffer[4]; +} HeapCTypeWithBufferObject; + +static int +heapctypewithbuffer_getbuffer(HeapCTypeWithBufferObject *self, Py_buffer *view, int flags) +{ + self->buffer[0] = '1'; + self->buffer[1] = '2'; + self->buffer[2] = '3'; + self->buffer[3] = '4'; + return PyBuffer_FillInfo( + view, (PyObject*)self, (void *)self->buffer, 4, 1, flags); +} + +static int +heapctypewithbuffer_releasebuffer(HeapCTypeWithBufferObject *self, Py_buffer *view) +{ + assert(view->obj == (void*) self); +} + +static PyType_Slot HeapCTypeWithBuffer_slots[] = { + {Py_bf_getbuffer, heapctypewithbuffer_getbuffer}, + {Py_bf_releasebuffer, heapctypewithbuffer_releasebuffer}, + {Py_tp_doc, (char*)heapctypewithbuffer__doc__}, + {0, 0}, +}; + +static PyType_Spec HeapCTypeWithBuffer_spec = { + "_testcapi.HeapCTypeWithBuffer", + sizeof(HeapCTypeWithBufferObject), + 0, + Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE, + HeapCTypeWithBuffer_slots +}; + PyDoc_STRVAR(heapctypesubclasswithfinalizer__doc__, "Subclass of HeapCType with a finalizer that reassigns __class__.\n\n" "__class__ is set to plain HeapCTypeSubclass during finalization.\n" @@ -6775,6 +6816,12 @@ PyInit__testcapi(void) } PyModule_AddObject(m, "HeapCTypeWithWeakref", HeapCTypeWithWeakref); + PyObject *HeapCTypeWithBuffer = PyType_FromSpec(&HeapCTypeWithBuffer_spec); + if (HeapCTypeWithBuffer == NULL) { + return NULL; + } + PyModule_AddObject(m, "HeapCTypeWithBuffer", HeapCTypeWithBuffer); + PyObject *subclass_with_finalizer_bases = PyTuple_Pack(1, HeapCTypeSubclass); if (subclass_with_finalizer_bases == NULL) { return NULL; diff --git a/Objects/typeslots.inc b/Objects/typeslots.inc index dc750cc0c4197..ffc9bb2e1c771 100644 --- a/Objects/typeslots.inc +++ b/Objects/typeslots.inc @@ -1,6 +1,6 @@ /* Generated by typeslots.py */ -0, -0, +offsetof(PyHeapTypeObject, as_buffer.bf_getbuffer), +offsetof(PyHeapTypeObject, as_buffer.bf_releasebuffer), offsetof(PyHeapTypeObject, as_mapping.mp_ass_subscript), offsetof(PyHeapTypeObject, as_mapping.mp_length), offsetof(PyHeapTypeObject, as_mapping.mp_subscript), From webhook-mailer at python.org Sat Jun 6 15:43:03 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 06 Jun 2020 19:43:03 -0000 Subject: [Python-checkins] Update comments to reflect the current API (GH-20682) Message-ID: https://github.com/python/cpython/commit/0e96c419d7287c3c7f155c9f2de3c61020386256 commit: 0e96c419d7287c3c7f155c9f2de3c61020386256 branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-06T12:42:54-07:00 summary: Update comments to reflect the current API (GH-20682) files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 2acf67289f225..03393f35b11c5 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -760,10 +760,12 @@ def __repr__(self): # set(cp - cq) == sp - sq # set(cp | cq) == sp | sq # set(cp & cq) == sp & sq - # cp.isequal(cq) == (sp == sq) - # cp.issubset(cq) == sp.issubset(sq) - # cp.issuperset(cq) == sp.issuperset(sq) - # cp.isdisjoint(cq) == sp.isdisjoint(sq) + # (cp == cq) == (sp == 
sq) + # (cp != cq) == (sp != sq) + # (cp <= cq) == (sp <= sq) + # (cp < cq) == (sp < sq) + # (cp >= cq) == (sp >= sq) + # (cp > cq) == (sp > sq) def __add__(self, other): '''Add counts from two counters. From webhook-mailer at python.org Sun Jun 7 08:06:01 2020 From: webhook-mailer at python.org (Hai Shi) Date: Sun, 07 Jun 2020 12:06:01 -0000 Subject: [Python-checkins] bpo-40898: Remove redundant if statements in tp_traverse (GH-20692) Message-ID: https://github.com/python/cpython/commit/47a23fc63fa5df2da8dbc542e78e521d4a7f10c9 commit: 47a23fc63fa5df2da8dbc542e78e521d4a7f10c9 branch: master author: Hai Shi committer: GitHub date: 2020-06-07T21:05:36+09:00 summary: bpo-40898: Remove redundant if statements in tp_traverse (GH-20692) files: M Modules/_functoolsmodule.c M Modules/_io/_iomodule.c M Modules/itertoolsmodule.c M Modules/overlapped.c diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index d158d3bae157b..f1ee23f294fa3 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -484,8 +484,7 @@ static int keyobject_traverse(keyobject *ko, visitproc visit, void *arg) { Py_VISIT(ko->cmp); - if (ko->object) - Py_VISIT(ko->object); + Py_VISIT(ko->object); return 0; } diff --git a/Modules/_io/_iomodule.c b/Modules/_io/_iomodule.c index a55e5cad6a392..e430352a48e21 100644 --- a/Modules/_io/_iomodule.c +++ b/Modules/_io/_iomodule.c @@ -623,9 +623,7 @@ iomodule_traverse(PyObject *mod, visitproc visit, void *arg) { _PyIO_State *state = get_io_state(mod); if (!state->initialized) return 0; - if (state->locale_module != NULL) { - Py_VISIT(state->locale_module); - } + Py_VISIT(state->locale_module); Py_VISIT(state->unsupported_operation); return 0; } diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 18fcebdf25b46..3f2f7165b171b 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -999,8 +999,7 @@ cycle_dealloc(cycleobject *lz) static int cycle_traverse(cycleobject *lz, visitproc visit, void *arg) { - if (lz->it) - Py_VISIT(lz->it); + Py_VISIT(lz->it); Py_VISIT(lz->saved); return 0; } diff --git a/Modules/overlapped.c b/Modules/overlapped.c index df6282cba819b..eed8fbf039300 100644 --- a/Modules/overlapped.c +++ b/Modules/overlapped.c @@ -1504,12 +1504,8 @@ Overlapped_traverse(OverlappedObject *self, visitproc visit, void *arg) } break; case TYPE_READ_FROM: - if(self->read_from.result) { - Py_VISIT(self->read_from.result); - } - if(self->read_from.allocated_buffer) { - Py_VISIT(self->read_from.allocated_buffer); - } + Py_VISIT(self->read_from.result); + Py_VISIT(self->read_from.allocated_buffer); } return 0; } From webhook-mailer at python.org Sun Jun 7 10:57:50 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Sun, 07 Jun 2020 14:57:50 -0000 Subject: [Python-checkins] bpo-39791: Support file systems that cannot support non-ascii filenames (skipping tests in that case). (#20681) Message-ID: https://github.com/python/cpython/commit/2efe18bf277dd0f38a1d248ae6bdd30947c26880 commit: 2efe18bf277dd0f38a1d248ae6bdd30947c26880 branch: master author: Jason R. Coombs committer: GitHub date: 2020-06-07T10:57:45-04:00 summary: bpo-39791: Support file systems that cannot support non-ascii filenames (skipping tests in that case). 
(#20681) files: M Lib/test/test_importlib/fixtures.py M Lib/test/test_importlib/test_main.py diff --git a/Lib/test/test_importlib/fixtures.py b/Lib/test/test_importlib/fixtures.py index b25febb7fe756..2e55d14b9aab9 100644 --- a/Lib/test/test_importlib/fixtures.py +++ b/Lib/test/test_importlib/fixtures.py @@ -210,6 +210,17 @@ def build_files(file_defs, prefix=pathlib.Path()): f.write(DALS(contents)) +class FileBuilder: + def unicode_filename(self): + try: + import test.support + except ImportError: + # outside CPython, hard-code a unicode snowman + return '?' + return test.support.FS_NONASCII or \ + self.skip("File system does not support non-ascii.") + + def DALS(str): "Dedent and left-strip" return textwrap.dedent(str).lstrip() diff --git a/Lib/test/test_importlib/test_main.py b/Lib/test/test_importlib/test_main.py index 7b18c3de16eea..91e501a2eb7cd 100644 --- a/Lib/test/test_importlib/test_main.py +++ b/Lib/test/test_importlib/test_main.py @@ -254,11 +254,16 @@ def test_attr(self): assert self.ep.attr is None -class FileSystem(fixtures.OnSysPath, fixtures.SiteDir, unittest.TestCase): +class FileSystem( + fixtures.OnSysPath, fixtures.SiteDir, fixtures.FileBuilder, + unittest.TestCase): def test_unicode_dir_on_sys_path(self): """ Ensure a Unicode subdirectory of a directory on sys.path does not crash. """ - fixtures.build_files({'?': {}}, prefix=self.site_dir) + fixtures.build_files( + {self.unicode_filename(): {}}, + prefix=self.site_dir, + ) list(distributions()) From webhook-mailer at python.org Sun Jun 7 19:22:40 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Sun, 07 Jun 2020 23:22:40 -0000 Subject: [Python-checkins] bpo-40887: Fix finalize_interp_clear() for free lists (GH-20698) Message-ID: https://github.com/python/cpython/commit/7907f8cbc6923240edb0b5b63adafb871c4c8875 commit: 7907f8cbc6923240edb0b5b63adafb871c4c8875 branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T01:22:36+02:00 summary: bpo-40887: Fix finalize_interp_clear() for free lists (GH-20698) Reorganize code to ensure that free lists are cleared in the right order. Call _PyWarnings_Fini() before _PyList_Fini(). 
files: M Python/pylifecycle.c diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 6d2eb1defc884..d730a98d3e5b9 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1250,37 +1250,26 @@ static void finalize_interp_types(PyThreadState *tstate, int is_main_interp) { _PyFrame_Fini(tstate); - _PyTuple_Fini(tstate); - _PyList_Fini(tstate); + _PyAsyncGen_Fini(tstate); + _PyContext_Fini(tstate); + if (is_main_interp) { _PySet_Fini(); - _PyBytes_Fini(); } - - _PyLong_Fini(tstate); - _PyFloat_Fini(tstate); - if (is_main_interp) { _PyDict_Fini(); } + _PyList_Fini(tstate); + _PyTuple_Fini(tstate); _PySlice_Fini(tstate); - _PyWarnings_Fini(tstate->interp); if (is_main_interp) { - _Py_HashRandomization_Fini(); - _PyArg_Fini(); + _PyBytes_Fini(); } - - _PyAsyncGen_Fini(tstate); - _PyContext_Fini(tstate); - - /* Cleanup Unicode implementation */ _PyUnicode_Fini(tstate); - - if (is_main_interp) { - _Py_ClearFileSystemEncoding(); - } + _PyFloat_Fini(tstate); + _PyLong_Fini(tstate); } @@ -1299,19 +1288,20 @@ finalize_interp_clear(PyThreadState *tstate) _PyGC_Fini(tstate); - finalize_interp_types(tstate, is_main_interp); - if (is_main_interp) { - /* XXX Still allocated: - - various static ad-hoc pointers to interned strings - - int and float free list blocks - - whatever various modules and libraries allocate - */ + _Py_HashRandomization_Fini(); + _PyArg_Fini(); + _Py_ClearFileSystemEncoding(); + } - PyGrammar_RemoveAccelerators(&_PyParser_Grammar); + _PyWarnings_Fini(tstate->interp); + if (is_main_interp) { + PyGrammar_RemoveAccelerators(&_PyParser_Grammar); _PyExc_Fini(); } + + finalize_interp_types(tstate, is_main_interp); } From webhook-mailer at python.org Sun Jun 7 19:39:52 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Sun, 07 Jun 2020 23:39:52 -0000 Subject: [Python-checkins] bpo-40881: Fix unicode_release_interned() (GH-20699) Message-ID: https://github.com/python/cpython/commit/c96a61e8163c2d25ed4ac77cf96201fd0bdb945c commit: c96a61e8163c2d25ed4ac77cf96201fd0bdb945c branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T01:39:47+02:00 summary: bpo-40881: Fix unicode_release_interned() (GH-20699) Use Py_SET_REFCNT() in unicode_release_interned(). files: M Objects/unicodeobject.c diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index e69bf01251ced..df10888949aba 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -15669,13 +15669,13 @@ unicode_release_interned(void) } switch (PyUnicode_CHECK_INTERNED(s)) { case SSTATE_INTERNED_IMMORTAL: - Py_REFCNT(s) += 1; + Py_SET_REFCNT(s, Py_REFCNT(s) + 1); #ifdef INTERNED_STATS immortal_size += PyUnicode_GET_LENGTH(s); #endif break; case SSTATE_INTERNED_MORTAL: - Py_REFCNT(s) += 2; + Py_SET_REFCNT(s, Py_REFCNT(s) + 2); #ifdef INTERNED_STATS mortal_size += PyUnicode_GET_LENGTH(s); #endif From webhook-mailer at python.org Sun Jun 7 20:14:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Jun 2020 00:14:55 -0000 Subject: [Python-checkins] bpo-40887: Don't use finalized free lists (GH-20700) Message-ID: https://github.com/python/cpython/commit/bcb198385dee469d630a184182df9dc1463e2c47 commit: bcb198385dee469d630a184182df9dc1463e2c47 branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T02:14:47+02:00 summary: bpo-40887: Don't use finalized free lists (GH-20700) In debug mode, ensure that free lists are no longer used after being finalized. Set numfree to -1 in finalization functions (eg. 
_PyList_Fini()), and then check that numfree is not equal to -1 before using a free list (e.g list_dealloc()). files: M Objects/floatobject.c M Objects/frameobject.c M Objects/genobject.c M Objects/listobject.c M Objects/tupleobject.c M Python/context.c diff --git a/Objects/floatobject.c b/Objects/floatobject.c index d72fd21f95faf..65625fe88cad8 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -116,6 +116,10 @@ PyFloat_FromDouble(double fval) struct _Py_float_state *state = &interp->float_state; PyFloatObject *op = state->free_list; if (op != NULL) { +#ifdef Py_DEBUG + // PyFloat_FromDouble() must not be called after _PyFloat_Fini() + assert(state->numfree != -1); +#endif state->free_list = (PyFloatObject *) Py_TYPE(op); state->numfree--; } @@ -219,6 +223,10 @@ float_dealloc(PyFloatObject *op) if (PyFloat_CheckExact(op)) { PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_float_state *state = &interp->float_state; +#ifdef Py_DEBUG + // float_dealloc() must not be called after _PyFloat_Fini() + assert(state->numfree != -1); +#endif if (state->numfree >= PyFloat_MAXFREELIST) { PyObject_FREE(op); return; @@ -1984,10 +1992,11 @@ void _PyFloat_ClearFreeList(PyThreadState *tstate) { struct _Py_float_state *state = &tstate->interp->float_state; - PyFloatObject *f = state->free_list, *next; - for (; f; f = next) { - next = (PyFloatObject*) Py_TYPE(f); + PyFloatObject *f = state->free_list; + while (f != NULL) { + PyFloatObject *next = (PyFloatObject*) Py_TYPE(f); PyObject_FREE(f); + f = next; } state->free_list = NULL; state->numfree = 0; @@ -1997,6 +2006,10 @@ void _PyFloat_Fini(PyThreadState *tstate) { _PyFloat_ClearFreeList(tstate); +#ifdef Py_DEBUG + struct _Py_float_state *state = &tstate->interp->float_state; + state->numfree = -1; +#endif } /* Print summary info about the state of the optimized allocator */ diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 0fe9f2a6666b2..0dad42ee7bff3 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -595,6 +595,10 @@ frame_dealloc(PyFrameObject *f) else { PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_frame_state *state = &interp->frame; +#ifdef Py_DEBUG + // frame_dealloc() must not be called after _PyFrame_Fini() + assert(state->numfree != -1); +#endif if (state->numfree < PyFrame_MAXFREELIST) { ++state->numfree; f->f_back = state->free_list; @@ -790,6 +794,10 @@ frame_alloc(PyCodeObject *code) } } else { +#ifdef Py_DEBUG + // frame_alloc() must not be called after _PyFrame_Fini() + assert(state->numfree != -1); +#endif assert(state->numfree > 0); --state->numfree; f = state->free_list; @@ -1188,6 +1196,10 @@ void _PyFrame_Fini(PyThreadState *tstate) { _PyFrame_ClearFreeList(tstate); +#ifdef Py_DEBUG + struct _Py_frame_state *state = &tstate->interp->frame; + state->numfree = -1; +#endif } /* Print summary info about the state of the optimized allocator */ diff --git a/Objects/genobject.c b/Objects/genobject.c index f7dbfd7486419..4207d5326cca1 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -1430,6 +1430,11 @@ void _PyAsyncGen_Fini(PyThreadState *tstate) { _PyAsyncGen_ClearFreeLists(tstate); +#ifdef Py_DEBUG + struct _Py_async_gen_state *state = &tstate->interp->async_gen; + state->value_numfree = -1; + state->asend_numfree = -1; +#endif } @@ -1474,6 +1479,10 @@ async_gen_asend_dealloc(PyAsyncGenASend *o) Py_CLEAR(o->ags_sendval); PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_async_gen_state *state = &interp->async_gen; +#ifdef 
Py_DEBUG + // async_gen_asend_dealloc() must not be called after _PyAsyncGen_Fini() + assert(state->asend_numfree != -1); +#endif if (state->asend_numfree < _PyAsyncGen_MAXFREELIST) { assert(PyAsyncGenASend_CheckExact(o)); state->asend_freelist[state->asend_numfree++] = o; @@ -1632,6 +1641,10 @@ async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) PyAsyncGenASend *o; PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_async_gen_state *state = &interp->async_gen; +#ifdef Py_DEBUG + // async_gen_asend_new() must not be called after _PyAsyncGen_Fini() + assert(state->asend_numfree != -1); +#endif if (state->asend_numfree) { state->asend_numfree--; o = state->asend_freelist[state->asend_numfree]; @@ -1667,6 +1680,10 @@ async_gen_wrapped_val_dealloc(_PyAsyncGenWrappedValue *o) Py_CLEAR(o->agw_val); PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_async_gen_state *state = &interp->async_gen; +#ifdef Py_DEBUG + // async_gen_wrapped_val_dealloc() must not be called after _PyAsyncGen_Fini() + assert(state->value_numfree != -1); +#endif if (state->value_numfree < _PyAsyncGen_MAXFREELIST) { assert(_PyAsyncGenWrappedValue_CheckExact(o)); state->value_freelist[state->value_numfree++] = o; @@ -1737,6 +1754,10 @@ _PyAsyncGenValueWrapperNew(PyObject *val) PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_async_gen_state *state = &interp->async_gen; +#ifdef Py_DEBUG + // _PyAsyncGenValueWrapperNew() must not be called after _PyAsyncGen_Fini() + assert(state->value_numfree != -1); +#endif if (state->value_numfree) { state->value_numfree--; o = state->value_freelist[state->value_numfree]; diff --git a/Objects/listobject.c b/Objects/listobject.c index 043256d8adbf5..22cdbe3cfdd41 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -111,6 +111,10 @@ void _PyList_Fini(PyThreadState *tstate) { _PyList_ClearFreeList(tstate); +#ifdef Py_DEBUG + struct _Py_list_state *state = &tstate->interp->list; + state->numfree = -1; +#endif } /* Print summary info about the state of the optimized allocator */ @@ -135,6 +139,10 @@ PyList_New(Py_ssize_t size) PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_list_state *state = &interp->list; PyListObject *op; +#ifdef Py_DEBUG + // PyList_New() must not be called after _PyList_Fini() + assert(state->numfree != -1); +#endif if (state->numfree) { state->numfree--; op = state->free_list[state->numfree]; @@ -330,6 +338,10 @@ list_dealloc(PyListObject *op) } PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_list_state *state = &interp->list; +#ifdef Py_DEBUG + // list_dealloc() must not be called after _PyList_Fini() + assert(state->numfree != -1); +#endif if (state->numfree < PyList_MAXFREELIST && PyList_CheckExact(op)) { state->free_list[state->numfree++] = op; } diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 951cd1faf7e8f..8bfa0894a79d4 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -54,6 +54,10 @@ tuple_alloc(struct _Py_tuple_state *state, Py_ssize_t size) return NULL; } #if PyTuple_MAXSAVESIZE > 0 +#ifdef Py_DEBUG + // tuple_alloc() must not be called after _PyTuple_Fini() + assert(state->numfree[0] != -1); +#endif if (size < PyTuple_MAXSAVESIZE && (op = state->free_list[size]) != NULL) { assert(size != 0); state->free_list[size] = (PyTupleObject *) op->ob_item[0]; @@ -102,6 +106,10 @@ PyTuple_New(Py_ssize_t size) } #if PyTuple_MAXSAVESIZE > 0 if (size == 0) { +#ifdef Py_DEBUG + // PyTuple_New() must not be called after _PyTuple_Fini() + 
assert(state->numfree[0] != -1); +#endif state->free_list[0] = op; ++state->numfree[0]; Py_INCREF(op); /* extra INCREF so that this is never freed */ @@ -227,6 +235,10 @@ tupledealloc(PyTupleObject *op) #if PyTuple_MAXSAVESIZE > 0 PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_tuple_state *state = &interp->tuple; +#ifdef Py_DEBUG + // tupledealloc() must not be called after _PyTuple_Fini() + assert(state->numfree[0] != -1); +#endif if (len < PyTuple_MAXSAVESIZE && state->numfree[len] < PyTuple_MAXFREELIST && Py_IS_TYPE(op, &PyTuple_Type)) @@ -984,6 +996,9 @@ _PyTuple_Fini(PyThreadState *tstate) Py_CLEAR(state->free_list[0]); _PyTuple_ClearFreeList(tstate); +#ifdef Py_DEBUG + state->numfree[0] = -1; +#endif #endif } diff --git a/Python/context.c b/Python/context.c index 3cf8db4c90cdf..dedbca99384c7 100644 --- a/Python/context.c +++ b/Python/context.c @@ -335,6 +335,10 @@ _context_alloc(void) PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_context_state *state = &interp->context; PyContext *ctx; +#ifdef Py_DEBUG + // _context_alloc() must not be called after _PyContext_Fini() + assert(state->numfree != -1); +#endif if (state->numfree) { state->numfree--; ctx = state->freelist; @@ -460,6 +464,10 @@ context_tp_dealloc(PyContext *self) PyInterpreterState *interp = _PyInterpreterState_GET(); struct _Py_context_state *state = &interp->context; +#ifdef Py_DEBUG + // _context_alloc() must not be called after _PyContext_Fini() + assert(state->numfree != -1); +#endif if (state->numfree < CONTEXT_FREELIST_MAXLEN) { state->numfree++; self->ctx_weakreflist = (PyObject *)state->freelist; @@ -1290,6 +1298,10 @@ _PyContext_Fini(PyThreadState *tstate) { Py_CLEAR(_token_missing); _PyContext_ClearFreeList(tstate); +#ifdef Py_DEBUG + struct _Py_context_state *state = &tstate->interp->context; + state->numfree = -1; +#endif _PyHamt_Fini(); } From webhook-mailer at python.org Sun Jun 7 20:47:42 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 08 Jun 2020 00:47:42 -0000 Subject: [Python-checkins] bpo-40904: Fix segfault in the new parser with f-string containing yield statements with no value (GH-20701) Message-ID: https://github.com/python/cpython/commit/972ab0327675e695373fc6272d5ac24e187579ad commit: 972ab0327675e695373fc6272d5ac24e187579ad branch: master author: Pablo Galindo committer: GitHub date: 2020-06-08T01:47:37+01:00 summary: bpo-40904: Fix segfault in the new parser with f-string containing yield statements with no value (GH-20701) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-08-01-08-57.bpo-40904.76qQzo.rst M Lib/test/test_fstring.py M Parser/pegen/parse_string.c diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index ea4e589929e7e..9048e89689df2 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -725,9 +725,11 @@ def test_yield(self): # a function into a generator def fn(y): f'y:{yield y*2}' + f'{yield}' g = fn(4) self.assertEqual(next(g), 8) + self.assertEqual(next(g), None) def test_yield_send(self): def fn(x): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-08-01-08-57.bpo-40904.76qQzo.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-08-01-08-57.bpo-40904.76qQzo.rst new file mode 100644 index 0000000000000..09009b18c63a3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-08-01-08-57.bpo-40904.76qQzo.rst @@ -0,0 +1,2 @@ +Fix possible segfault in the new PEG parser when parsing f-string containing +yield statements with no value (:code:`f"{yield}"`). 
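(As a side note, a minimal sketch of the case this commit fixes, mirroring the test added to Lib/test/test_fstring.py above — no names beyond the commit's own test are introduced: an f-string whose replacement field is a bare ``yield`` turns the enclosing function into a generator, and the PEG parser's location-shifting helper shift_expr() previously dereferenced the missing value expression, which the NULL check below prevents.

    def fn(y):
        f'y:{yield y*2}'   # yield with a value
        f'{yield}'         # bare yield, no value -- the case that segfaulted

    g = fn(4)
    assert next(g) == 8       # first yield produces y*2
    assert next(g) is None    # bare yield produces None
)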
Patch by Pablo Galindo diff --git a/Parser/pegen/parse_string.c b/Parser/pegen/parse_string.c index efe82df47658b..94241e1965e9a 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/pegen/parse_string.c @@ -278,6 +278,9 @@ static void fstring_shift_argument(expr_ty parent, arg_ty args, int lineno, int static inline void shift_expr(expr_ty parent, expr_ty n, int line, int col) { + if (n == NULL) { + return; + } if (parent->lineno < n->lineno) { col = 0; } From webhook-mailer at python.org Sun Jun 7 21:00:56 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Mon, 08 Jun 2020 01:00:56 -0000 Subject: [Python-checkins] bpo-39791 native hooks for importlib.resources.files (GH-20576) Message-ID: https://github.com/python/cpython/commit/843c27765652e2322011fb3e5d88f4837de38c06 commit: 843c27765652e2322011fb3e5d88f4837de38c06 branch: master author: Jason R. Coombs committer: GitHub date: 2020-06-07T21:00:51-04:00 summary: bpo-39791 native hooks for importlib.resources.files (GH-20576) * Provide native .files support on SourceFileLoader. * Add native importlib.resources.files() support to zipimporter. Remove fallback support. * make regen-all * ?? Added by blurb_it. * Move 'files' into the ResourceReader so it can carry the relevant module name context. * Create 'importlib.readers' module and add FileReader to it. * Add zip reader and rely on it for a TraversableResources object on zipimporter. * Remove TraversableAdapter, no longer needed. * Update blurb. * Replace backslashes with forward slashes. * Incorporate changes from importlib_metadata 2.0, finalizing the interface for extension via get_resource_reader. Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Lib/importlib/readers.py A Misc/NEWS.d/next/Library/2020-06-02-02-16-02.bpo-39791.StCJlA.rst M Lib/importlib/_bootstrap_external.py M Lib/importlib/_common.py M Lib/importlib/abc.py M Lib/importlib/resources.py M Lib/zipimport.py M Python/importlib_external.h M Python/importlib_zipimport.h diff --git a/Lib/importlib/_bootstrap_external.py b/Lib/importlib/_bootstrap_external.py index 25a3f8c0e0934..4f06039f3d23c 100644 --- a/Lib/importlib/_bootstrap_external.py +++ b/Lib/importlib/_bootstrap_external.py @@ -982,32 +982,10 @@ def get_data(self, path): with _io.FileIO(path, 'r') as file: return file.read() - # ResourceReader ABC API. 
- @_check_name def get_resource_reader(self, module): - if self.is_package(module): - return self - return None - - def open_resource(self, resource): - path = _path_join(_path_split(self.path)[0], resource) - return _io.FileIO(path, 'r') - - def resource_path(self, resource): - if not self.is_resource(resource): - raise FileNotFoundError - path = _path_join(_path_split(self.path)[0], resource) - return path - - def is_resource(self, name): - if path_sep in name: - return False - path = _path_join(_path_split(self.path)[0], name) - return _path_isfile(path) - - def contents(self): - return iter(_os.listdir(_path_split(self.path)[0])) + from importlib.readers import FileReader + return FileReader(self) class SourceFileLoader(FileLoader, SourceLoader): diff --git a/Lib/importlib/_common.py b/Lib/importlib/_common.py index ba7cbac3c9bfd..b15c59eb9c98a 100644 --- a/Lib/importlib/_common.py +++ b/Lib/importlib/_common.py @@ -1,38 +1,82 @@ import os import pathlib -import zipfile import tempfile import functools import contextlib +import types +import importlib +from typing import Union, Any, Optional +from .abc import ResourceReader -def from_package(package): +Package = Union[types.ModuleType, str] + + +def files(package): """ - Return a Traversable object for the given package. + Get a Traversable resource from a package + """ + return from_package(get_package(package)) + +def normalize_path(path): + # type: (Any) -> str + """Normalize a path by ensuring it is a string. + + If the resulting string contains path separators, an exception is raised. """ - spec = package.__spec__ - return from_traversable_resources(spec) or fallback_resources(spec) + str_path = str(path) + parent, file_name = os.path.split(str_path) + if parent: + raise ValueError('{!r} must be only a file name'.format(path)) + return file_name -def from_traversable_resources(spec): +def get_resource_reader(package): + # type: (types.ModuleType) -> Optional[ResourceReader] """ - If the spec.loader implements TraversableResources, - directly or implicitly, it will have a ``files()`` method. + Return the package's loader if it's a ResourceReader. """ - with contextlib.suppress(AttributeError): - return spec.loader.files() + # We can't use + # a issubclass() check here because apparently abc.'s __subclasscheck__() + # hook wants to create a weak reference to the object, but + # zipimport.zipimporter does not support weak references, resulting in a + # TypeError. That seems terrible. + spec = package.__spec__ + reader = getattr(spec.loader, 'get_resource_reader', None) + if reader is None: + return None + return reader(spec.name) -def fallback_resources(spec): - package_directory = pathlib.Path(spec.origin).parent - try: - archive_path = spec.loader.archive - rel_path = package_directory.relative_to(archive_path) - return zipfile.Path(archive_path, str(rel_path) + '/') - except Exception: - pass - return package_directory +def resolve(cand): + # type: (Package) -> types.ModuleType + return ( + cand if isinstance(cand, types.ModuleType) + else importlib.import_module(cand) + ) + + +def get_package(package): + # type: (Package) -> types.ModuleType + """Take a package name or module object and return the module. + + Raise an exception if the resolved module is not a package. + """ + resolved = resolve(package) + if resolved.__spec__.submodule_search_locations is None: + raise TypeError('{!r} is not a package'.format(package)) + return resolved + + +def from_package(package): + """ + Return a Traversable object for the given package. 
+ + """ + spec = package.__spec__ + reader = spec.loader.get_resource_reader(spec.name) + return reader.files() @contextlib.contextmanager diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index b8a9bb1a21ef7..0b20e7c13f282 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -468,7 +468,7 @@ def resource_path(self, resource): raise FileNotFoundError(resource) def is_resource(self, path): - return self.files().joinpath(path).isfile() + return self.files().joinpath(path).is_file() def contents(self): return (item.name for item in self.files().iterdir()) diff --git a/Lib/importlib/readers.py b/Lib/importlib/readers.py new file mode 100644 index 0000000000000..fb49ebe2b1642 --- /dev/null +++ b/Lib/importlib/readers.py @@ -0,0 +1,30 @@ +import zipfile +import pathlib +from . import abc + + +class FileReader(abc.TraversableResources): + def __init__(self, loader): + self.path = pathlib.Path(loader.path).parent + + def files(self): + return self.path + + +class ZipReader(FileReader): + def __init__(self, loader, module): + _, _, name = module.rpartition('.') + prefix = loader.prefix.replace('\\', '/') + name + '/' + self.path = zipfile.Path(loader.archive, prefix) + + def open_resource(self, resource): + try: + return super().open_resource(resource) + except KeyError as exc: + raise FileNotFoundError(exc.args[0]) + + def is_resource(self, path): + # workaround for `zipfile.Path.is_file` returning true + # for non-existent paths. + target = self.files().joinpath(path) + return target.is_file() and target.exists() diff --git a/Lib/importlib/resources.py b/Lib/importlib/resources.py index b803a01c91d65..4535619f4f014 100644 --- a/Lib/importlib/resources.py +++ b/Lib/importlib/resources.py @@ -1,15 +1,13 @@ import os -from . import abc as resources_abc from . import _common -from ._common import as_file +from ._common import as_file, files from contextlib import contextmanager, suppress -from importlib import import_module from importlib.abc import ResourceLoader from io import BytesIO, TextIOWrapper from pathlib import Path from types import ModuleType -from typing import ContextManager, Iterable, Optional, Union +from typing import ContextManager, Iterable, Union from typing import cast from typing.io import BinaryIO, TextIO @@ -33,60 +31,11 @@ Resource = Union[str, os.PathLike] -def _resolve(name) -> ModuleType: - """If name is a string, resolve to a module.""" - if hasattr(name, '__spec__'): - return name - return import_module(name) - - -def _get_package(package) -> ModuleType: - """Take a package name or module object and return the module. - - If a name, the module is imported. If the resolved module - object is not a package, raise an exception. - """ - module = _resolve(package) - if module.__spec__.submodule_search_locations is None: - raise TypeError('{!r} is not a package'.format(package)) - return module - - -def _normalize_path(path) -> str: - """Normalize a path by ensuring it is a string. - - If the resulting string contains path separators, an exception is raised. - """ - parent, file_name = os.path.split(path) - if parent: - raise ValueError('{!r} must be only a file name'.format(path)) - return file_name - - -def _get_resource_reader( - package: ModuleType) -> Optional[resources_abc.ResourceReader]: - # Return the package's loader if it's a ResourceReader. 
We can't use - # a issubclass() check here because apparently abc.'s __subclasscheck__() - # hook wants to create a weak reference to the object, but - # zipimport.zipimporter does not support weak references, resulting in a - # TypeError. That seems terrible. - spec = package.__spec__ - if hasattr(spec.loader, 'get_resource_reader'): - return cast(resources_abc.ResourceReader, - spec.loader.get_resource_reader(spec.name)) - return None - - -def _check_location(package): - if package.__spec__.origin is None or not package.__spec__.has_location: - raise FileNotFoundError(f'Package has no location {package!r}') - - def open_binary(package: Package, resource: Resource) -> BinaryIO: """Return a file-like object opened for binary reading of the resource.""" - resource = _normalize_path(resource) - package = _get_package(package) - reader = _get_resource_reader(package) + resource = _common.normalize_path(resource) + package = _common.get_package(package) + reader = _common.get_resource_reader(package) if reader is not None: return reader.open_resource(resource) absolute_package_path = os.path.abspath( @@ -140,13 +89,6 @@ def read_text(package: Package, return fp.read() -def files(package: Package) -> resources_abc.Traversable: - """ - Get a Traversable resource from a package - """ - return _common.from_package(_get_package(package)) - - def path( package: Package, resource: Resource, ) -> 'ContextManager[Path]': @@ -158,17 +100,18 @@ def path( raised if the file was deleted prior to the context manager exiting). """ - reader = _get_resource_reader(_get_package(package)) + reader = _common.get_resource_reader(_common.get_package(package)) return ( _path_from_reader(reader, resource) if reader else - _common.as_file(files(package).joinpath(_normalize_path(resource))) + _common.as_file( + _common.files(package).joinpath(_common.normalize_path(resource))) ) @contextmanager def _path_from_reader(reader, resource): - norm_resource = _normalize_path(resource) + norm_resource = _common.normalize_path(resource) with suppress(FileNotFoundError): yield Path(reader.resource_path(norm_resource)) return @@ -182,9 +125,9 @@ def is_resource(package: Package, name: str) -> bool: Directories are *not* resources. """ - package = _get_package(package) - _normalize_path(name) - reader = _get_resource_reader(package) + package = _common.get_package(package) + _common.normalize_path(name) + reader = _common.get_resource_reader(package) if reader is not None: return reader.is_resource(name) package_contents = set(contents(package)) @@ -200,8 +143,8 @@ def contents(package: Package) -> Iterable[str]: not considered resources. Use `is_resource()` on each entry returned here to check if it is a resource or not. """ - package = _get_package(package) - reader = _get_resource_reader(package) + package = _common.get_package(package) + reader = _common.get_resource_reader(package) if reader is not None: return reader.contents() # Is the package a namespace package? 
By definition, namespace packages diff --git a/Lib/zipimport.py b/Lib/zipimport.py index 5ef0a17c2a5ed..080e0c4d986d6 100644 --- a/Lib/zipimport.py +++ b/Lib/zipimport.py @@ -280,11 +280,8 @@ def get_resource_reader(self, fullname): return None except ZipImportError: return None - if not _ZipImportResourceReader._registered: - from importlib.abc import ResourceReader - ResourceReader.register(_ZipImportResourceReader) - _ZipImportResourceReader._registered = True - return _ZipImportResourceReader(self, fullname) + from importlib.readers import ZipReader + return ZipReader(self, fullname) def __repr__(self): @@ -719,74 +716,3 @@ def _get_module_code(self, fullname): return code, ispackage, modpath else: raise ZipImportError(f"can't find module {fullname!r}", name=fullname) - - -class _ZipImportResourceReader: - """Private class used to support ZipImport.get_resource_reader(). - - This class is allowed to reference all the innards and private parts of - the zipimporter. - """ - _registered = False - - def __init__(self, zipimporter, fullname): - self.zipimporter = zipimporter - self.fullname = fullname - - def open_resource(self, resource): - fullname_as_path = self.fullname.replace('.', '/') - path = f'{fullname_as_path}/{resource}' - from io import BytesIO - try: - return BytesIO(self.zipimporter.get_data(path)) - except OSError: - raise FileNotFoundError(path) - - def resource_path(self, resource): - # All resources are in the zip file, so there is no path to the file. - # Raising FileNotFoundError tells the higher level API to extract the - # binary data and create a temporary file. - raise FileNotFoundError - - def is_resource(self, name): - # Maybe we could do better, but if we can get the data, it's a - # resource. Otherwise it isn't. - fullname_as_path = self.fullname.replace('.', '/') - path = f'{fullname_as_path}/{name}' - try: - self.zipimporter.get_data(path) - except OSError: - return False - return True - - def contents(self): - # This is a bit convoluted, because fullname will be a module path, - # but _files is a list of file names relative to the top of the - # archive's namespace. We want to compare file paths to find all the - # names of things inside the module represented by fullname. So we - # turn the module path of fullname into a file path relative to the - # top of the archive, and then we iterate through _files looking for - # names inside that "directory". - from pathlib import Path - fullname_path = Path(self.zipimporter.get_filename(self.fullname)) - relative_path = fullname_path.relative_to(self.zipimporter.archive) - # Don't forget that fullname names a package, so its path will include - # __init__.py, which we want to ignore. - assert relative_path.name == '__init__.py' - package_path = relative_path.parent - subdirs_seen = set() - for filename in self.zipimporter._files: - try: - relative = Path(filename).relative_to(package_path) - except ValueError: - continue - # If the path of the file (which is relative to the top of the zip - # namespace), relative to the package given when the resource - # reader was created, has a parent, then it's a name in a - # subdirectory and thus we skip it. 
- parent_name = relative.parent.name - if len(parent_name) == 0: - yield relative.name - elif parent_name not in subdirs_seen: - subdirs_seen.add(parent_name) - yield parent_name diff --git a/Misc/NEWS.d/next/Library/2020-06-02-02-16-02.bpo-39791.StCJlA.rst b/Misc/NEWS.d/next/Library/2020-06-02-02-16-02.bpo-39791.StCJlA.rst new file mode 100644 index 0000000000000..61753a57ca8b7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-02-02-16-02.bpo-39791.StCJlA.rst @@ -0,0 +1 @@ +Built-in loaders (SourceFileLoader and ZipImporter) now supply ``TraversableResources`` implementations for ``ResourceReader``, and the fallback function has been removed. diff --git a/Python/importlib_external.h b/Python/importlib_external.h index dd237428867ba..4d08e01b138c3 100644 --- a/Python/importlib_external.h +++ b/Python/importlib_external.h @@ -1396,15 +1396,13 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 0,0,29,3,0,0,115,14,0,0,0,8,2,8,8,8, 14,8,10,8,7,8,10,14,8,114,221,0,0,0,99,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4, - 0,0,0,0,0,0,0,115,124,0,0,0,101,0,90,1, + 0,0,0,0,0,0,0,115,92,0,0,0,101,0,90,1, 100,0,90,2,100,1,90,3,100,2,100,3,132,0,90,4, 100,4,100,5,132,0,90,5,100,6,100,7,132,0,90,6, 101,7,135,0,102,1,100,8,100,9,132,8,131,1,90,8, 101,7,100,10,100,11,132,0,131,1,90,9,100,12,100,13, 132,0,90,10,101,7,100,14,100,15,132,0,131,1,90,11, - 100,16,100,17,132,0,90,12,100,18,100,19,132,0,90,13, - 100,20,100,21,132,0,90,14,100,22,100,23,132,0,90,15, - 135,0,4,0,90,16,83,0,41,24,218,10,70,105,108,101, + 135,0,4,0,90,12,83,0,41,16,218,10,70,105,108,101, 76,111,97,100,101,114,122,103,66,97,115,101,32,102,105,108, 101,32,108,111,97,100,101,114,32,99,108,97,115,115,32,119, 104,105,99,104,32,105,109,112,108,101,109,101,110,116,115,32, @@ -1489,72 +1487,27 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 0,0,208,3,0,0,115,10,0,0,0,0,2,14,1,16, 1,40,2,14,1,122,19,70,105,108,101,76,111,97,100,101, 114,46,103,101,116,95,100,97,116,97,99,2,0,0,0,0, - 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, - 0,0,0,115,18,0,0,0,124,0,160,0,124,1,161,1, - 114,14,124,0,83,0,100,0,83,0,114,109,0,0,0,41, - 1,114,182,0,0,0,169,2,114,118,0,0,0,114,216,0, - 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, - 0,218,19,103,101,116,95,114,101,115,111,117,114,99,101,95, - 114,101,97,100,101,114,219,3,0,0,115,6,0,0,0,0, - 2,10,1,4,1,122,30,70,105,108,101,76,111,97,100,101, - 114,46,103,101,116,95,114,101,115,111,117,114,99,101,95,114, - 101,97,100,101,114,99,2,0,0,0,0,0,0,0,0,0, - 0,0,3,0,0,0,4,0,0,0,67,0,0,0,115,32, - 0,0,0,116,0,116,1,124,0,106,2,131,1,100,1,25, - 0,124,1,131,2,125,2,116,3,160,4,124,2,100,2,161, - 2,83,0,41,3,78,114,73,0,0,0,114,251,0,0,0, - 41,5,114,38,0,0,0,114,47,0,0,0,114,44,0,0, - 0,114,64,0,0,0,114,65,0,0,0,169,3,114,118,0, - 0,0,90,8,114,101,115,111,117,114,99,101,114,44,0,0, - 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,13,111,112,101,110,95,114,101,115,111,117,114,99,101,225, - 3,0,0,115,4,0,0,0,0,1,20,1,122,24,70,105, - 108,101,76,111,97,100,101,114,46,111,112,101,110,95,114,101, - 115,111,117,114,99,101,99,2,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,3,0,0,0,67,0,0,0,115, - 38,0,0,0,124,0,160,0,124,1,161,1,115,14,116,1, - 130,1,116,2,116,3,124,0,106,4,131,1,100,1,25,0, - 124,1,131,2,125,2,124,2,83,0,169,2,78,114,73,0, - 0,0,41,5,218,11,105,115,95,114,101,115,111,117,114,99, - 101,218,17,70,105,108,101,78,111,116,70,111,117,110,100,69, - 114,114,111,114,114,38,0,0,0,114,47,0,0,0,114,44, - 0,0,0,114,255,0,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,218,13,114,101,115,111,117,114,99, - 
101,95,112,97,116,104,229,3,0,0,115,8,0,0,0,0, - 1,10,1,4,1,20,1,122,24,70,105,108,101,76,111,97, - 100,101,114,46,114,101,115,111,117,114,99,101,95,112,97,116, - 104,99,2,0,0,0,0,0,0,0,0,0,0,0,3,0, - 0,0,3,0,0,0,67,0,0,0,115,40,0,0,0,116, - 0,124,1,118,0,114,12,100,1,83,0,116,1,116,2,124, - 0,106,3,131,1,100,2,25,0,124,1,131,2,125,2,116, - 4,124,2,131,1,83,0,41,3,78,70,114,73,0,0,0, - 41,5,114,35,0,0,0,114,38,0,0,0,114,47,0,0, - 0,114,44,0,0,0,114,54,0,0,0,169,3,114,118,0, - 0,0,114,116,0,0,0,114,44,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,114,2,1,0,0, - 235,3,0,0,115,8,0,0,0,0,1,8,1,4,1,20, - 1,122,22,70,105,108,101,76,111,97,100,101,114,46,105,115, - 95,114,101,115,111,117,114,99,101,99,1,0,0,0,0,0, - 0,0,0,0,0,0,1,0,0,0,5,0,0,0,67,0, - 0,0,115,24,0,0,0,116,0,116,1,160,2,116,3,124, - 0,106,4,131,1,100,1,25,0,161,1,131,1,83,0,114, - 1,1,0,0,41,5,218,4,105,116,101,114,114,4,0,0, - 0,218,7,108,105,115,116,100,105,114,114,47,0,0,0,114, - 44,0,0,0,114,246,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,218,8,99,111,110,116,101,110, - 116,115,241,3,0,0,115,2,0,0,0,0,1,122,19,70, - 105,108,101,76,111,97,100,101,114,46,99,111,110,116,101,110, - 116,115,41,17,114,125,0,0,0,114,124,0,0,0,114,126, - 0,0,0,114,127,0,0,0,114,209,0,0,0,114,243,0, - 0,0,114,247,0,0,0,114,136,0,0,0,114,220,0,0, - 0,114,179,0,0,0,114,227,0,0,0,114,254,0,0,0, - 114,0,1,0,0,114,4,1,0,0,114,2,1,0,0,114, - 8,1,0,0,90,13,95,95,99,108,97,115,115,99,101,108, - 108,95,95,114,5,0,0,0,114,5,0,0,0,114,249,0, - 0,0,114,8,0,0,0,114,239,0,0,0,173,3,0,0, - 115,30,0,0,0,8,2,4,3,8,6,8,4,8,3,2, - 1,14,11,2,1,10,4,8,11,2,1,10,5,8,4,8, - 6,8,6,114,239,0,0,0,99,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,3,0,0,0,2,0,0,0,67, + 0,0,0,115,20,0,0,0,100,1,100,2,108,0,109,1, + 125,2,1,0,124,2,124,0,131,1,83,0,41,3,78,114, + 73,0,0,0,41,1,218,10,70,105,108,101,82,101,97,100, + 101,114,41,2,90,17,105,109,112,111,114,116,108,105,98,46, + 114,101,97,100,101,114,115,114,253,0,0,0,41,3,114,118, + 0,0,0,114,216,0,0,0,114,253,0,0,0,114,5,0, + 0,0,114,5,0,0,0,114,8,0,0,0,218,19,103,101, + 116,95,114,101,115,111,117,114,99,101,95,114,101,97,100,101, + 114,217,3,0,0,115,4,0,0,0,0,2,12,1,122,30, + 70,105,108,101,76,111,97,100,101,114,46,103,101,116,95,114, + 101,115,111,117,114,99,101,95,114,101,97,100,101,114,41,13, + 114,125,0,0,0,114,124,0,0,0,114,126,0,0,0,114, + 127,0,0,0,114,209,0,0,0,114,243,0,0,0,114,247, + 0,0,0,114,136,0,0,0,114,220,0,0,0,114,179,0, + 0,0,114,227,0,0,0,114,254,0,0,0,90,13,95,95, + 99,108,97,115,115,99,101,108,108,95,95,114,5,0,0,0, + 114,5,0,0,0,114,249,0,0,0,114,8,0,0,0,114, + 239,0,0,0,173,3,0,0,115,22,0,0,0,8,2,4, + 3,8,6,8,4,8,3,2,1,14,11,2,1,10,4,8, + 9,2,1,114,239,0,0,0,99,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, 0,115,46,0,0,0,101,0,90,1,100,0,90,2,100,1, 90,3,100,2,100,3,132,0,90,4,100,4,100,5,132,0, @@ -1574,7 +1527,7 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 49,0,0,0,218,8,115,116,95,109,116,105,109,101,90,7, 115,116,95,115,105,122,101,41,3,114,118,0,0,0,114,44, 0,0,0,114,238,0,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,224,0,0,0,249,3,0,0, + 0,0,114,8,0,0,0,114,224,0,0,0,227,3,0,0, 115,4,0,0,0,0,2,8,1,122,27,83,111,117,114,99, 101,70,105,108,101,76,111,97,100,101,114,46,112,97,116,104, 95,115,116,97,116,115,99,4,0,0,0,0,0,0,0,0, @@ -1585,10 +1538,10 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 225,0,0,0,41,5,114,118,0,0,0,114,107,0,0,0, 114,106,0,0,0,114,26,0,0,0,114,52,0,0,0,114, 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,226, - 0,0,0,254,3,0,0,115,4,0,0,0,0,2,8,1, + 
0,0,0,232,3,0,0,115,4,0,0,0,0,2,8,1, 122,32,83,111,117,114,99,101,70,105,108,101,76,111,97,100, 101,114,46,95,99,97,99,104,101,95,98,121,116,101,99,111, - 100,101,114,60,0,0,0,114,11,1,0,0,99,3,0,0, + 100,101,114,60,0,0,0,114,1,1,0,0,99,3,0,0, 0,0,0,0,0,1,0,0,0,9,0,0,0,11,0,0, 0,67,0,0,0,115,252,0,0,0,116,0,124,1,131,1, 92,2,125,4,125,5,103,0,125,6,124,4,114,52,116,1, @@ -1616,11 +1569,11 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 0,90,5,109,107,100,105,114,218,15,70,105,108,101,69,120, 105,115,116,115,69,114,114,111,114,114,50,0,0,0,114,134, 0,0,0,114,149,0,0,0,114,69,0,0,0,41,9,114, - 118,0,0,0,114,44,0,0,0,114,26,0,0,0,114,12, + 118,0,0,0,114,44,0,0,0,114,26,0,0,0,114,2, 1,0,0,218,6,112,97,114,101,110,116,114,96,0,0,0, 114,37,0,0,0,114,33,0,0,0,114,228,0,0,0,114, 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,225, - 0,0,0,3,4,0,0,115,46,0,0,0,0,2,12,1, + 0,0,0,237,3,0,0,115,46,0,0,0,0,2,12,1, 4,2,12,1,12,1,12,2,12,1,10,1,2,1,14,1, 12,2,8,1,14,3,6,1,4,255,4,2,28,1,2,1, 12,1,16,1,16,2,8,1,2,255,122,25,83,111,117,114, @@ -1629,8 +1582,8 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 0,0,114,126,0,0,0,114,127,0,0,0,114,224,0,0, 0,114,226,0,0,0,114,225,0,0,0,114,5,0,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 9,1,0,0,245,3,0,0,115,8,0,0,0,8,2,4, - 2,8,5,8,5,114,9,1,0,0,99,0,0,0,0,0, + 255,0,0,0,223,3,0,0,115,8,0,0,0,8,2,4, + 2,8,5,8,5,114,255,0,0,0,99,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,64, 0,0,0,115,32,0,0,0,101,0,90,1,100,0,90,2, 100,1,90,3,100,2,100,3,132,0,90,4,100,4,100,5, @@ -1651,7 +1604,7 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 114,235,0,0,0,41,5,114,118,0,0,0,114,139,0,0, 0,114,44,0,0,0,114,26,0,0,0,114,151,0,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 213,0,0,0,38,4,0,0,115,22,0,0,0,0,1,10, + 213,0,0,0,16,4,0,0,115,22,0,0,0,0,1,10, 1,10,4,2,1,2,254,6,4,12,1,2,1,14,1,2, 1,2,253,122,29,83,111,117,114,99,101,108,101,115,115,70, 105,108,101,76,111,97,100,101,114,46,103,101,116,95,99,111, @@ -1661,15 +1614,15 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 111,110,101,32,97,115,32,116,104,101,114,101,32,105,115,32, 110,111,32,115,111,117,114,99,101,32,99,111,100,101,46,78, 114,5,0,0,0,114,219,0,0,0,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,114,229,0,0,0,54,4, + 5,0,0,0,114,8,0,0,0,114,229,0,0,0,32,4, 0,0,115,2,0,0,0,0,2,122,31,83,111,117,114,99, 101,108,101,115,115,70,105,108,101,76,111,97,100,101,114,46, 103,101,116,95,115,111,117,114,99,101,78,41,6,114,125,0, 0,0,114,124,0,0,0,114,126,0,0,0,114,127,0,0, 0,114,213,0,0,0,114,229,0,0,0,114,5,0,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 15,1,0,0,34,4,0,0,115,6,0,0,0,8,2,4, - 2,8,16,114,15,1,0,0,99,0,0,0,0,0,0,0, + 5,1,0,0,12,4,0,0,115,6,0,0,0,8,2,4, + 2,8,16,114,5,1,0,0,99,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, 0,115,92,0,0,0,101,0,90,1,100,0,90,2,100,1, 90,3,100,2,100,3,132,0,90,4,100,4,100,5,132,0, @@ -1687,1049 +1640,1051 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 0,0,0,0,0,3,0,0,0,2,0,0,0,67,0,0, 0,115,16,0,0,0,124,1,124,0,95,0,124,2,124,0, 95,1,100,0,83,0,114,109,0,0,0,114,159,0,0,0, - 114,5,1,0,0,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,114,209,0,0,0,71,4,0,0,115,4,0, - 0,0,0,1,6,1,122,28,69,120,116,101,110,115,105,111, - 110,70,105,108,101,76,111,97,100,101,114,46,95,95,105,110, - 105,116,95,95,99,2,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,2,0,0,0,67,0,0,0,115,24,0, - 0,0,124,0,106,0,124,1,106,0,107,2,111,22,124,0, - 106,1,124,1,106,1,107,2,83,0,114,109,0,0,0,114, - 240,0,0,0,114,242,0,0,0,114,5,0,0,0,114,5, - 
0,0,0,114,8,0,0,0,114,243,0,0,0,75,4,0, - 0,115,6,0,0,0,0,1,12,1,10,255,122,26,69,120, - 116,101,110,115,105,111,110,70,105,108,101,76,111,97,100,101, - 114,46,95,95,101,113,95,95,99,1,0,0,0,0,0,0, - 0,0,0,0,0,1,0,0,0,3,0,0,0,67,0,0, - 0,115,20,0,0,0,116,0,124,0,106,1,131,1,116,0, - 124,0,106,2,131,1,65,0,83,0,114,109,0,0,0,114, - 244,0,0,0,114,246,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,247,0,0,0,79,4,0, - 0,115,2,0,0,0,0,1,122,28,69,120,116,101,110,115, + 41,3,114,118,0,0,0,114,116,0,0,0,114,44,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,209,0,0,0,49,4,0,0,115,4,0,0,0,0,1, + 6,1,122,28,69,120,116,101,110,115,105,111,110,70,105,108, + 101,76,111,97,100,101,114,46,95,95,105,110,105,116,95,95, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,2,0,0,0,67,0,0,0,115,24,0,0,0,124,0, + 106,0,124,1,106,0,107,2,111,22,124,0,106,1,124,1, + 106,1,107,2,83,0,114,109,0,0,0,114,240,0,0,0, + 114,242,0,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,114,243,0,0,0,53,4,0,0,115,6,0, + 0,0,0,1,12,1,10,255,122,26,69,120,116,101,110,115, 105,111,110,70,105,108,101,76,111,97,100,101,114,46,95,95, - 104,97,115,104,95,95,99,2,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,5,0,0,0,67,0,0,0,115, - 36,0,0,0,116,0,160,1,116,2,106,3,124,1,161,2, - 125,2,116,0,160,4,100,1,124,1,106,5,124,0,106,6, - 161,3,1,0,124,2,83,0,41,2,122,38,67,114,101,97, - 116,101,32,97,110,32,117,110,105,116,105,97,108,105,122,101, - 100,32,101,120,116,101,110,115,105,111,110,32,109,111,100,117, - 108,101,122,38,101,120,116,101,110,115,105,111,110,32,109,111, - 100,117,108,101,32,123,33,114,125,32,108,111,97,100,101,100, - 32,102,114,111,109,32,123,33,114,125,41,7,114,134,0,0, - 0,114,214,0,0,0,114,163,0,0,0,90,14,99,114,101, - 97,116,101,95,100,121,110,97,109,105,99,114,149,0,0,0, - 114,116,0,0,0,114,44,0,0,0,41,3,114,118,0,0, - 0,114,187,0,0,0,114,216,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,212,0,0,0,82, - 4,0,0,115,14,0,0,0,0,2,4,1,6,255,4,2, - 6,1,8,255,4,2,122,33,69,120,116,101,110,115,105,111, - 110,70,105,108,101,76,111,97,100,101,114,46,99,114,101,97, - 116,101,95,109,111,100,117,108,101,99,2,0,0,0,0,0, - 0,0,0,0,0,0,2,0,0,0,5,0,0,0,67,0, - 0,0,115,36,0,0,0,116,0,160,1,116,2,106,3,124, - 1,161,2,1,0,116,0,160,4,100,1,124,0,106,5,124, - 0,106,6,161,3,1,0,100,2,83,0,41,3,122,30,73, - 110,105,116,105,97,108,105,122,101,32,97,110,32,101,120,116, - 101,110,115,105,111,110,32,109,111,100,117,108,101,122,40,101, - 120,116,101,110,115,105,111,110,32,109,111,100,117,108,101,32, - 123,33,114,125,32,101,120,101,99,117,116,101,100,32,102,114, - 111,109,32,123,33,114,125,78,41,7,114,134,0,0,0,114, - 214,0,0,0,114,163,0,0,0,90,12,101,120,101,99,95, + 101,113,95,95,99,1,0,0,0,0,0,0,0,0,0,0, + 0,1,0,0,0,3,0,0,0,67,0,0,0,115,20,0, + 0,0,116,0,124,0,106,1,131,1,116,0,124,0,106,2, + 131,1,65,0,83,0,114,109,0,0,0,114,244,0,0,0, + 114,246,0,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,114,247,0,0,0,57,4,0,0,115,2,0, + 0,0,0,1,122,28,69,120,116,101,110,115,105,111,110,70, + 105,108,101,76,111,97,100,101,114,46,95,95,104,97,115,104, + 95,95,99,2,0,0,0,0,0,0,0,0,0,0,0,3, + 0,0,0,5,0,0,0,67,0,0,0,115,36,0,0,0, + 116,0,160,1,116,2,106,3,124,1,161,2,125,2,116,0, + 160,4,100,1,124,1,106,5,124,0,106,6,161,3,1,0, + 124,2,83,0,41,2,122,38,67,114,101,97,116,101,32,97, + 110,32,117,110,105,116,105,97,108,105,122,101,100,32,101,120, + 116,101,110,115,105,111,110,32,109,111,100,117,108,101,122,38, + 101,120,116,101,110,115,105,111,110,32,109,111,100,117,108,101, + 32,123,33,114,125,32,108,111,97,100,101,100,32,102,114,111, + 
109,32,123,33,114,125,41,7,114,134,0,0,0,114,214,0, + 0,0,114,163,0,0,0,90,14,99,114,101,97,116,101,95, 100,121,110,97,109,105,99,114,149,0,0,0,114,116,0,0, - 0,114,44,0,0,0,114,253,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,217,0,0,0,90, - 4,0,0,115,8,0,0,0,0,2,14,1,6,1,8,255, - 122,31,69,120,116,101,110,115,105,111,110,70,105,108,101,76, - 111,97,100,101,114,46,101,120,101,99,95,109,111,100,117,108, - 101,99,2,0,0,0,0,0,0,0,0,0,0,0,2,0, - 0,0,4,0,0,0,3,0,0,0,115,36,0,0,0,116, - 0,124,0,106,1,131,1,100,1,25,0,137,0,116,2,135, - 0,102,1,100,2,100,3,132,8,116,3,68,0,131,1,131, - 1,83,0,41,4,122,49,82,101,116,117,114,110,32,84,114, - 117,101,32,105,102,32,116,104,101,32,101,120,116,101,110,115, - 105,111,110,32,109,111,100,117,108,101,32,105,115,32,97,32, - 112,97,99,107,97,103,101,46,114,39,0,0,0,99,1,0, - 0,0,0,0,0,0,0,0,0,0,2,0,0,0,4,0, - 0,0,51,0,0,0,115,26,0,0,0,124,0,93,18,125, - 1,136,0,100,0,124,1,23,0,107,2,86,0,1,0,113, - 2,100,1,83,0,41,2,114,209,0,0,0,78,114,5,0, - 0,0,169,2,114,32,0,0,0,218,6,115,117,102,102,105, - 120,169,1,90,9,102,105,108,101,95,110,97,109,101,114,5, - 0,0,0,114,8,0,0,0,218,9,60,103,101,110,101,120, - 112,114,62,99,4,0,0,115,4,0,0,0,4,1,2,255, - 122,49,69,120,116,101,110,115,105,111,110,70,105,108,101,76, - 111,97,100,101,114,46,105,115,95,112,97,99,107,97,103,101, - 46,60,108,111,99,97,108,115,62,46,60,103,101,110,101,120, - 112,114,62,41,4,114,47,0,0,0,114,44,0,0,0,218, - 3,97,110,121,218,18,69,88,84,69,78,83,73,79,78,95, - 83,85,70,70,73,88,69,83,114,219,0,0,0,114,5,0, - 0,0,114,18,1,0,0,114,8,0,0,0,114,182,0,0, - 0,96,4,0,0,115,8,0,0,0,0,2,14,1,12,1, - 2,255,122,30,69,120,116,101,110,115,105,111,110,70,105,108, - 101,76,111,97,100,101,114,46,105,115,95,112,97,99,107,97, - 103,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, - 100,1,83,0,41,2,122,63,82,101,116,117,114,110,32,78, - 111,110,101,32,97,115,32,97,110,32,101,120,116,101,110,115, - 105,111,110,32,109,111,100,117,108,101,32,99,97,110,110,111, - 116,32,99,114,101,97,116,101,32,97,32,99,111,100,101,32, - 111,98,106,101,99,116,46,78,114,5,0,0,0,114,219,0, - 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, - 0,114,213,0,0,0,102,4,0,0,115,2,0,0,0,0, - 2,122,28,69,120,116,101,110,115,105,111,110,70,105,108,101, - 76,111,97,100,101,114,46,103,101,116,95,99,111,100,101,99, - 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, - 1,0,0,0,67,0,0,0,115,4,0,0,0,100,1,83, - 0,41,2,122,53,82,101,116,117,114,110,32,78,111,110,101, - 32,97,115,32,101,120,116,101,110,115,105,111,110,32,109,111, - 100,117,108,101,115,32,104,97,118,101,32,110,111,32,115,111, - 117,114,99,101,32,99,111,100,101,46,78,114,5,0,0,0, + 0,114,44,0,0,0,41,3,114,118,0,0,0,114,187,0, + 0,0,114,216,0,0,0,114,5,0,0,0,114,5,0,0, + 0,114,8,0,0,0,114,212,0,0,0,60,4,0,0,115, + 14,0,0,0,0,2,4,1,6,255,4,2,6,1,8,255, + 4,2,122,33,69,120,116,101,110,115,105,111,110,70,105,108, + 101,76,111,97,100,101,114,46,99,114,101,97,116,101,95,109, + 111,100,117,108,101,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,5,0,0,0,67,0,0,0,115,36, + 0,0,0,116,0,160,1,116,2,106,3,124,1,161,2,1, + 0,116,0,160,4,100,1,124,0,106,5,124,0,106,6,161, + 3,1,0,100,2,83,0,41,3,122,30,73,110,105,116,105, + 97,108,105,122,101,32,97,110,32,101,120,116,101,110,115,105, + 111,110,32,109,111,100,117,108,101,122,40,101,120,116,101,110, + 115,105,111,110,32,109,111,100,117,108,101,32,123,33,114,125, + 32,101,120,101,99,117,116,101,100,32,102,114,111,109,32,123, + 33,114,125,78,41,7,114,134,0,0,0,114,214,0,0,0, + 114,163,0,0,0,90,12,101,120,101,99,95,100,121,110,97, + 
109,105,99,114,149,0,0,0,114,116,0,0,0,114,44,0, + 0,0,169,2,114,118,0,0,0,114,216,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,114,217,0, + 0,0,68,4,0,0,115,8,0,0,0,0,2,14,1,6, + 1,8,255,122,31,69,120,116,101,110,115,105,111,110,70,105, + 108,101,76,111,97,100,101,114,46,101,120,101,99,95,109,111, + 100,117,108,101,99,2,0,0,0,0,0,0,0,0,0,0, + 0,2,0,0,0,4,0,0,0,3,0,0,0,115,36,0, + 0,0,116,0,124,0,106,1,131,1,100,1,25,0,137,0, + 116,2,135,0,102,1,100,2,100,3,132,8,116,3,68,0, + 131,1,131,1,83,0,41,4,122,49,82,101,116,117,114,110, + 32,84,114,117,101,32,105,102,32,116,104,101,32,101,120,116, + 101,110,115,105,111,110,32,109,111,100,117,108,101,32,105,115, + 32,97,32,112,97,99,107,97,103,101,46,114,39,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,4,0,0,0,51,0,0,0,115,26,0,0,0,124,0, + 93,18,125,1,136,0,100,0,124,1,23,0,107,2,86,0, + 1,0,113,2,100,1,83,0,41,2,114,209,0,0,0,78, + 114,5,0,0,0,169,2,114,32,0,0,0,218,6,115,117, + 102,102,105,120,169,1,90,9,102,105,108,101,95,110,97,109, + 101,114,5,0,0,0,114,8,0,0,0,218,9,60,103,101, + 110,101,120,112,114,62,77,4,0,0,115,4,0,0,0,4, + 1,2,255,122,49,69,120,116,101,110,115,105,111,110,70,105, + 108,101,76,111,97,100,101,114,46,105,115,95,112,97,99,107, + 97,103,101,46,60,108,111,99,97,108,115,62,46,60,103,101, + 110,101,120,112,114,62,41,4,114,47,0,0,0,114,44,0, + 0,0,218,3,97,110,121,218,18,69,88,84,69,78,83,73, + 79,78,95,83,85,70,70,73,88,69,83,114,219,0,0,0, + 114,5,0,0,0,114,9,1,0,0,114,8,0,0,0,114, + 182,0,0,0,74,4,0,0,115,8,0,0,0,0,2,14, + 1,12,1,2,255,122,30,69,120,116,101,110,115,105,111,110, + 70,105,108,101,76,111,97,100,101,114,46,105,115,95,112,97, + 99,107,97,103,101,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,1,0,0,0,67,0,0,0,115,4, + 0,0,0,100,1,83,0,41,2,122,63,82,101,116,117,114, + 110,32,78,111,110,101,32,97,115,32,97,110,32,101,120,116, + 101,110,115,105,111,110,32,109,111,100,117,108,101,32,99,97, + 110,110,111,116,32,99,114,101,97,116,101,32,97,32,99,111, + 100,101,32,111,98,106,101,99,116,46,78,114,5,0,0,0, 114,219,0,0,0,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,114,229,0,0,0,106,4,0,0,115,2,0, - 0,0,0,2,122,30,69,120,116,101,110,115,105,111,110,70, - 105,108,101,76,111,97,100,101,114,46,103,101,116,95,115,111, - 117,114,99,101,99,2,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,1,0,0,0,67,0,0,0,115,6,0, - 0,0,124,0,106,0,83,0,114,250,0,0,0,114,48,0, - 0,0,114,219,0,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,114,179,0,0,0,110,4,0,0,115, - 2,0,0,0,0,3,122,32,69,120,116,101,110,115,105,111, - 110,70,105,108,101,76,111,97,100,101,114,46,103,101,116,95, - 102,105,108,101,110,97,109,101,78,41,14,114,125,0,0,0, - 114,124,0,0,0,114,126,0,0,0,114,127,0,0,0,114, - 209,0,0,0,114,243,0,0,0,114,247,0,0,0,114,212, - 0,0,0,114,217,0,0,0,114,182,0,0,0,114,213,0, - 0,0,114,229,0,0,0,114,136,0,0,0,114,179,0,0, - 0,114,5,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,114,252,0,0,0,63,4,0,0,115,22, - 0,0,0,8,2,4,6,8,4,8,4,8,3,8,8,8, - 6,8,6,8,4,8,4,2,1,114,252,0,0,0,99,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,64,0,0,0,115,104,0,0,0,101,0,90,1, - 100,0,90,2,100,1,90,3,100,2,100,3,132,0,90,4, - 100,4,100,5,132,0,90,5,100,6,100,7,132,0,90,6, - 100,8,100,9,132,0,90,7,100,10,100,11,132,0,90,8, - 100,12,100,13,132,0,90,9,100,14,100,15,132,0,90,10, - 100,16,100,17,132,0,90,11,100,18,100,19,132,0,90,12, - 100,20,100,21,132,0,90,13,100,22,100,23,132,0,90,14, - 100,24,83,0,41,25,218,14,95,78,97,109,101,115,112,97, - 99,101,80,97,116,104,97,38,1,0,0,82,101,112,114,101, - 115,101,110,116,115,32,97,32,110,97,109,101,115,112,97,99, - 
101,32,112,97,99,107,97,103,101,39,115,32,112,97,116,104, - 46,32,32,73,116,32,117,115,101,115,32,116,104,101,32,109, - 111,100,117,108,101,32,110,97,109,101,10,32,32,32,32,116, - 111,32,102,105,110,100,32,105,116,115,32,112,97,114,101,110, - 116,32,109,111,100,117,108,101,44,32,97,110,100,32,102,114, - 111,109,32,116,104,101,114,101,32,105,116,32,108,111,111,107, - 115,32,117,112,32,116,104,101,32,112,97,114,101,110,116,39, - 115,10,32,32,32,32,95,95,112,97,116,104,95,95,46,32, - 32,87,104,101,110,32,116,104,105,115,32,99,104,97,110,103, - 101,115,44,32,116,104,101,32,109,111,100,117,108,101,39,115, - 32,111,119,110,32,112,97,116,104,32,105,115,32,114,101,99, - 111,109,112,117,116,101,100,44,10,32,32,32,32,117,115,105, - 110,103,32,112,97,116,104,95,102,105,110,100,101,114,46,32, - 32,70,111,114,32,116,111,112,45,108,101,118,101,108,32,109, - 111,100,117,108,101,115,44,32,116,104,101,32,112,97,114,101, - 110,116,32,109,111,100,117,108,101,39,115,32,112,97,116,104, - 10,32,32,32,32,105,115,32,115,121,115,46,112,97,116,104, - 46,99,4,0,0,0,0,0,0,0,0,0,0,0,4,0, - 0,0,3,0,0,0,67,0,0,0,115,36,0,0,0,124, - 1,124,0,95,0,124,2,124,0,95,1,116,2,124,0,160, - 3,161,0,131,1,124,0,95,4,124,3,124,0,95,5,100, - 0,83,0,114,109,0,0,0,41,6,218,5,95,110,97,109, - 101,218,5,95,112,97,116,104,114,111,0,0,0,218,16,95, - 103,101,116,95,112,97,114,101,110,116,95,112,97,116,104,218, - 17,95,108,97,115,116,95,112,97,114,101,110,116,95,112,97, - 116,104,218,12,95,112,97,116,104,95,102,105,110,100,101,114, - 169,4,114,118,0,0,0,114,116,0,0,0,114,44,0,0, - 0,90,11,112,97,116,104,95,102,105,110,100,101,114,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,209,0, - 0,0,123,4,0,0,115,8,0,0,0,0,1,6,1,6, - 1,14,1,122,23,95,78,97,109,101,115,112,97,99,101,80, - 97,116,104,46,95,95,105,110,105,116,95,95,99,1,0,0, - 0,0,0,0,0,0,0,0,0,4,0,0,0,3,0,0, - 0,67,0,0,0,115,38,0,0,0,124,0,106,0,160,1, - 100,1,161,1,92,3,125,1,125,2,125,3,124,2,100,2, - 107,2,114,30,100,3,83,0,124,1,100,4,102,2,83,0, - 41,5,122,62,82,101,116,117,114,110,115,32,97,32,116,117, - 112,108,101,32,111,102,32,40,112,97,114,101,110,116,45,109, - 111,100,117,108,101,45,110,97,109,101,44,32,112,97,114,101, - 110,116,45,112,97,116,104,45,97,116,116,114,45,110,97,109, - 101,41,114,71,0,0,0,114,40,0,0,0,41,2,114,1, - 0,0,0,114,44,0,0,0,90,8,95,95,112,97,116,104, - 95,95,41,2,114,23,1,0,0,114,41,0,0,0,41,4, - 114,118,0,0,0,114,14,1,0,0,218,3,100,111,116,90, - 2,109,101,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,218,23,95,102,105,110,100,95,112,97,114,101,110,116, - 95,112,97,116,104,95,110,97,109,101,115,129,4,0,0,115, - 8,0,0,0,0,2,18,1,8,2,4,3,122,38,95,78, - 97,109,101,115,112,97,99,101,80,97,116,104,46,95,102,105, - 110,100,95,112,97,114,101,110,116,95,112,97,116,104,95,110, - 97,109,101,115,99,1,0,0,0,0,0,0,0,0,0,0, - 0,3,0,0,0,3,0,0,0,67,0,0,0,115,28,0, - 0,0,124,0,160,0,161,0,92,2,125,1,125,2,116,1, - 116,2,106,3,124,1,25,0,124,2,131,2,83,0,114,109, - 0,0,0,41,4,114,30,1,0,0,114,130,0,0,0,114, - 1,0,0,0,218,7,109,111,100,117,108,101,115,41,3,114, - 118,0,0,0,90,18,112,97,114,101,110,116,95,109,111,100, - 117,108,101,95,110,97,109,101,90,14,112,97,116,104,95,97, - 116,116,114,95,110,97,109,101,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,25,1,0,0,139,4,0,0, - 115,4,0,0,0,0,1,12,1,122,31,95,78,97,109,101, - 115,112,97,99,101,80,97,116,104,46,95,103,101,116,95,112, - 97,114,101,110,116,95,112,97,116,104,99,1,0,0,0,0, - 0,0,0,0,0,0,0,3,0,0,0,4,0,0,0,67, - 0,0,0,115,80,0,0,0,116,0,124,0,160,1,161,0, - 131,1,125,1,124,1,124,0,106,2,107,3,114,74,124,0, - 
160,3,124,0,106,4,124,1,161,2,125,2,124,2,100,0, - 117,1,114,68,124,2,106,5,100,0,117,0,114,68,124,2, - 106,6,114,68,124,2,106,6,124,0,95,7,124,1,124,0, - 95,2,124,0,106,7,83,0,114,109,0,0,0,41,8,114, - 111,0,0,0,114,25,1,0,0,114,26,1,0,0,114,27, - 1,0,0,114,23,1,0,0,114,140,0,0,0,114,178,0, - 0,0,114,24,1,0,0,41,3,114,118,0,0,0,90,11, - 112,97,114,101,110,116,95,112,97,116,104,114,187,0,0,0, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, - 12,95,114,101,99,97,108,99,117,108,97,116,101,143,4,0, - 0,115,16,0,0,0,0,2,12,1,10,1,14,3,18,1, - 6,1,8,1,6,1,122,27,95,78,97,109,101,115,112,97, - 99,101,80,97,116,104,46,95,114,101,99,97,108,99,117,108, - 97,116,101,99,1,0,0,0,0,0,0,0,0,0,0,0, - 1,0,0,0,3,0,0,0,67,0,0,0,115,12,0,0, - 0,116,0,124,0,160,1,161,0,131,1,83,0,114,109,0, - 0,0,41,2,114,6,1,0,0,114,32,1,0,0,114,246, - 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,218,8,95,95,105,116,101,114,95,95,156,4,0,0, - 115,2,0,0,0,0,1,122,23,95,78,97,109,101,115,112, - 97,99,101,80,97,116,104,46,95,95,105,116,101,114,95,95, - 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, - 0,2,0,0,0,67,0,0,0,115,12,0,0,0,124,0, - 160,0,161,0,124,1,25,0,83,0,114,109,0,0,0,169, - 1,114,32,1,0,0,41,2,114,118,0,0,0,218,5,105, - 110,100,101,120,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,11,95,95,103,101,116,105,116,101,109,95,95, - 159,4,0,0,115,2,0,0,0,0,1,122,26,95,78,97, - 109,101,115,112,97,99,101,80,97,116,104,46,95,95,103,101, - 116,105,116,101,109,95,95,99,3,0,0,0,0,0,0,0, + 8,0,0,0,114,213,0,0,0,80,4,0,0,115,2,0, + 0,0,0,2,122,28,69,120,116,101,110,115,105,111,110,70, + 105,108,101,76,111,97,100,101,114,46,103,101,116,95,99,111, + 100,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, + 100,1,83,0,41,2,122,53,82,101,116,117,114,110,32,78, + 111,110,101,32,97,115,32,101,120,116,101,110,115,105,111,110, + 32,109,111,100,117,108,101,115,32,104,97,118,101,32,110,111, + 32,115,111,117,114,99,101,32,99,111,100,101,46,78,114,5, + 0,0,0,114,219,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,229,0,0,0,84,4,0,0, + 115,2,0,0,0,0,2,122,30,69,120,116,101,110,115,105, + 111,110,70,105,108,101,76,111,97,100,101,114,46,103,101,116, + 95,115,111,117,114,99,101,99,2,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,1,0,0,0,67,0,0,0, + 115,6,0,0,0,124,0,106,0,83,0,114,250,0,0,0, + 114,48,0,0,0,114,219,0,0,0,114,5,0,0,0,114, + 5,0,0,0,114,8,0,0,0,114,179,0,0,0,88,4, + 0,0,115,2,0,0,0,0,3,122,32,69,120,116,101,110, + 115,105,111,110,70,105,108,101,76,111,97,100,101,114,46,103, + 101,116,95,102,105,108,101,110,97,109,101,78,41,14,114,125, + 0,0,0,114,124,0,0,0,114,126,0,0,0,114,127,0, + 0,0,114,209,0,0,0,114,243,0,0,0,114,247,0,0, + 0,114,212,0,0,0,114,217,0,0,0,114,182,0,0,0, + 114,213,0,0,0,114,229,0,0,0,114,136,0,0,0,114, + 179,0,0,0,114,5,0,0,0,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,114,252,0,0,0,41,4,0, + 0,115,22,0,0,0,8,2,4,6,8,4,8,4,8,3, + 8,8,8,6,8,6,8,4,8,4,2,1,114,252,0,0, + 0,99,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,64,0,0,0,115,104,0,0,0,101, + 0,90,1,100,0,90,2,100,1,90,3,100,2,100,3,132, + 0,90,4,100,4,100,5,132,0,90,5,100,6,100,7,132, + 0,90,6,100,8,100,9,132,0,90,7,100,10,100,11,132, + 0,90,8,100,12,100,13,132,0,90,9,100,14,100,15,132, + 0,90,10,100,16,100,17,132,0,90,11,100,18,100,19,132, + 0,90,12,100,20,100,21,132,0,90,13,100,22,100,23,132, + 0,90,14,100,24,83,0,41,25,218,14,95,78,97,109,101, + 115,112,97,99,101,80,97,116,104,97,38,1,0,0,82,101, + 112,114,101,115,101,110,116,115,32,97,32,110,97,109,101,115, + 112,97,99,101,32,112,97,99,107,97,103,101,39,115,32,112, + 
97,116,104,46,32,32,73,116,32,117,115,101,115,32,116,104, + 101,32,109,111,100,117,108,101,32,110,97,109,101,10,32,32, + 32,32,116,111,32,102,105,110,100,32,105,116,115,32,112,97, + 114,101,110,116,32,109,111,100,117,108,101,44,32,97,110,100, + 32,102,114,111,109,32,116,104,101,114,101,32,105,116,32,108, + 111,111,107,115,32,117,112,32,116,104,101,32,112,97,114,101, + 110,116,39,115,10,32,32,32,32,95,95,112,97,116,104,95, + 95,46,32,32,87,104,101,110,32,116,104,105,115,32,99,104, + 97,110,103,101,115,44,32,116,104,101,32,109,111,100,117,108, + 101,39,115,32,111,119,110,32,112,97,116,104,32,105,115,32, + 114,101,99,111,109,112,117,116,101,100,44,10,32,32,32,32, + 117,115,105,110,103,32,112,97,116,104,95,102,105,110,100,101, + 114,46,32,32,70,111,114,32,116,111,112,45,108,101,118,101, + 108,32,109,111,100,117,108,101,115,44,32,116,104,101,32,112, + 97,114,101,110,116,32,109,111,100,117,108,101,39,115,32,112, + 97,116,104,10,32,32,32,32,105,115,32,115,121,115,46,112, + 97,116,104,46,99,4,0,0,0,0,0,0,0,0,0,0, + 0,4,0,0,0,3,0,0,0,67,0,0,0,115,36,0, + 0,0,124,1,124,0,95,0,124,2,124,0,95,1,116,2, + 124,0,160,3,161,0,131,1,124,0,95,4,124,3,124,0, + 95,5,100,0,83,0,114,109,0,0,0,41,6,218,5,95, + 110,97,109,101,218,5,95,112,97,116,104,114,111,0,0,0, + 218,16,95,103,101,116,95,112,97,114,101,110,116,95,112,97, + 116,104,218,17,95,108,97,115,116,95,112,97,114,101,110,116, + 95,112,97,116,104,218,12,95,112,97,116,104,95,102,105,110, + 100,101,114,169,4,114,118,0,0,0,114,116,0,0,0,114, + 44,0,0,0,90,11,112,97,116,104,95,102,105,110,100,101, + 114,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,209,0,0,0,101,4,0,0,115,8,0,0,0,0,1, + 6,1,6,1,14,1,122,23,95,78,97,109,101,115,112,97, + 99,101,80,97,116,104,46,95,95,105,110,105,116,95,95,99, + 1,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0, + 3,0,0,0,67,0,0,0,115,38,0,0,0,124,0,106, + 0,160,1,100,1,161,1,92,3,125,1,125,2,125,3,124, + 2,100,2,107,2,114,30,100,3,83,0,124,1,100,4,102, + 2,83,0,41,5,122,62,82,101,116,117,114,110,115,32,97, + 32,116,117,112,108,101,32,111,102,32,40,112,97,114,101,110, + 116,45,109,111,100,117,108,101,45,110,97,109,101,44,32,112, + 97,114,101,110,116,45,112,97,116,104,45,97,116,116,114,45, + 110,97,109,101,41,114,71,0,0,0,114,40,0,0,0,41, + 2,114,1,0,0,0,114,44,0,0,0,90,8,95,95,112, + 97,116,104,95,95,41,2,114,14,1,0,0,114,41,0,0, + 0,41,4,114,118,0,0,0,114,4,1,0,0,218,3,100, + 111,116,90,2,109,101,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,23,95,102,105,110,100,95,112,97,114, + 101,110,116,95,112,97,116,104,95,110,97,109,101,115,107,4, + 0,0,115,8,0,0,0,0,2,18,1,8,2,4,3,122, + 38,95,78,97,109,101,115,112,97,99,101,80,97,116,104,46, + 95,102,105,110,100,95,112,97,114,101,110,116,95,112,97,116, + 104,95,110,97,109,101,115,99,1,0,0,0,0,0,0,0, 0,0,0,0,3,0,0,0,3,0,0,0,67,0,0,0, - 115,14,0,0,0,124,2,124,0,106,0,124,1,60,0,100, - 0,83,0,114,109,0,0,0,41,1,114,24,1,0,0,41, - 3,114,118,0,0,0,114,35,1,0,0,114,44,0,0,0, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, - 11,95,95,115,101,116,105,116,101,109,95,95,162,4,0,0, - 115,2,0,0,0,0,1,122,26,95,78,97,109,101,115,112, - 97,99,101,80,97,116,104,46,95,95,115,101,116,105,116,101, - 109,95,95,99,1,0,0,0,0,0,0,0,0,0,0,0, - 1,0,0,0,3,0,0,0,67,0,0,0,115,12,0,0, - 0,116,0,124,0,160,1,161,0,131,1,83,0,114,109,0, - 0,0,41,2,114,23,0,0,0,114,32,1,0,0,114,246, + 115,28,0,0,0,124,0,160,0,161,0,92,2,125,1,125, + 2,116,1,116,2,106,3,124,1,25,0,124,2,131,2,83, + 0,114,109,0,0,0,41,4,114,21,1,0,0,114,130,0, + 0,0,114,1,0,0,0,218,7,109,111,100,117,108,101,115, + 41,3,114,118,0,0,0,90,18,112,97,114,101,110,116,95, + 
[diff body elided: this hunk consists entirely of auto-generated frozen-module bytecode for importlib._bootstrap_external (apparently the regenerated frozen importlib header). The removed and added lines are raw marshalled code objects — the decoded strings cover _NamespacePath, _NamespaceLoader, PathFinder, FileFinder, _fix_up_module, _get_supported_file_loaders, and _setup — and differ only in serialized constants and shifted line-number/offset tables, so the byte values are not meaningful to review by hand.]
2,102,2,125,2,124,2,68,0,93,108,92,2,125,3,125, - 4,116,5,100,5,100,6,132,0,124,4,68,0,131,1,131, - 1,115,82,74,0,130,1,124,4,100,7,25,0,125,5,124, - 3,116,1,106,3,118,0,114,116,116,1,106,3,124,3,25, - 0,125,6,1,0,113,170,113,52,122,20,116,0,160,6,124, - 3,161,1,125,6,87,0,1,0,113,170,87,0,113,52,4, - 0,116,7,121,158,1,0,1,0,1,0,89,0,113,52,89, - 0,113,52,48,0,113,52,116,7,100,8,131,1,130,1,116, - 8,124,1,100,9,124,6,131,3,1,0,116,8,124,1,100, - 10,124,5,131,3,1,0,116,8,124,1,100,11,100,12,160, - 9,124,4,161,1,131,3,1,0,116,8,124,1,100,13,100, - 14,100,15,132,0,124,4,68,0,131,1,131,3,1,0,103, - 0,100,16,162,1,125,7,124,3,100,3,107,2,144,1,114, - 6,124,7,160,10,100,17,161,1,1,0,124,7,68,0,93, - 52,125,8,124,8,116,1,106,3,118,1,144,1,114,38,116, - 0,160,6,124,8,161,1,125,9,110,10,116,1,106,3,124, - 8,25,0,125,9,116,8,124,1,124,8,124,9,131,3,1, - 0,144,1,113,10,116,8,124,1,100,18,116,11,131,0,131, - 3,1,0,116,12,160,13,116,2,160,14,161,0,161,1,1, - 0,124,3,100,3,107,2,144,1,114,128,116,15,160,10,100, - 19,161,1,1,0,100,20,116,12,118,0,144,1,114,128,100, - 21,116,16,95,17,100,22,83,0,41,23,122,205,83,101,116, - 117,112,32,116,104,101,32,112,97,116,104,45,98,97,115,101, - 100,32,105,109,112,111,114,116,101,114,115,32,102,111,114,32, - 105,109,112,111,114,116,108,105,98,32,98,121,32,105,109,112, - 111,114,116,105,110,103,32,110,101,101,100,101,100,10,32,32, - 32,32,98,117,105,108,116,45,105,110,32,109,111,100,117,108, - 101,115,32,97,110,100,32,105,110,106,101,99,116,105,110,103, - 32,116,104,101,109,32,105,110,116,111,32,116,104,101,32,103, - 108,111,98,97,108,32,110,97,109,101,115,112,97,99,101,46, - 10,10,32,32,32,32,79,116,104,101,114,32,99,111,109,112, - 111,110,101,110,116,115,32,97,114,101,32,101,120,116,114,97, - 99,116,101,100,32,102,114,111,109,32,116,104,101,32,99,111, - 114,101,32,98,111,111,116,115,116,114,97,112,32,109,111,100, - 117,108,101,46,10,10,32,32,32,32,90,5,112,111,115,105, - 120,250,1,47,90,2,110,116,250,1,92,99,1,0,0,0, - 0,0,0,0,0,0,0,0,2,0,0,0,3,0,0,0, - 115,0,0,0,115,26,0,0,0,124,0,93,18,125,1,116, - 0,124,1,131,1,100,0,107,2,86,0,1,0,113,2,100, - 1,83,0,41,2,114,39,0,0,0,78,41,1,114,23,0, - 0,0,41,2,114,32,0,0,0,114,94,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,19,1, - 0,0,57,6,0,0,114,63,1,0,0,122,25,95,115,101, - 116,117,112,46,60,108,111,99,97,108,115,62,46,60,103,101, - 110,101,120,112,114,62,114,73,0,0,0,122,30,105,109,112, - 111,114,116,108,105,98,32,114,101,113,117,105,114,101,115,32, - 112,111,115,105,120,32,111,114,32,110,116,114,4,0,0,0, - 114,35,0,0,0,114,31,0,0,0,114,40,0,0,0,114, - 58,0,0,0,99,1,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,4,0,0,0,83,0,0,0,115,22,0, - 0,0,104,0,124,0,93,14,125,1,100,0,124,1,155,0, - 157,2,146,2,113,4,83,0,41,1,114,74,0,0,0,114, - 5,0,0,0,41,2,114,32,0,0,0,218,1,115,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,71,1, - 0,0,74,6,0,0,114,63,1,0,0,122,25,95,115,101, - 116,117,112,46,60,108,111,99,97,108,115,62,46,60,115,101, - 116,99,111,109,112,62,41,3,114,64,0,0,0,114,75,0, - 0,0,114,160,0,0,0,114,192,0,0,0,114,9,0,0, - 0,122,4,46,112,121,119,122,6,95,100,46,112,121,100,84, - 78,41,18,114,134,0,0,0,114,1,0,0,0,114,163,0, - 0,0,114,31,1,0,0,114,125,0,0,0,218,3,97,108, - 108,90,18,95,98,117,105,108,116,105,110,95,102,114,111,109, - 95,110,97,109,101,114,117,0,0,0,114,129,0,0,0,114, - 36,0,0,0,114,186,0,0,0,114,14,0,0,0,114,21, - 1,0,0,114,167,0,0,0,114,83,1,0,0,114,101,0, - 0,0,114,191,0,0,0,114,195,0,0,0,41,10,218,17, - 95,98,111,111,116,115,116,114,97,112,95,109,111,100,117,108, - 
101,90,11,115,101,108,102,95,109,111,100,117,108,101,90,10, - 111,115,95,100,101,116,97,105,108,115,90,10,98,117,105,108, - 116,105,110,95,111,115,114,31,0,0,0,114,35,0,0,0, - 90,9,111,115,95,109,111,100,117,108,101,90,13,98,117,105, - 108,116,105,110,95,110,97,109,101,115,90,12,98,117,105,108, - 116,105,110,95,110,97,109,101,90,14,98,117,105,108,116,105, - 110,95,109,111,100,117,108,101,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,218,6,95,115,101,116,117,112,39, - 6,0,0,115,70,0,0,0,0,8,4,1,6,1,6,2, - 10,3,22,1,12,2,22,1,8,1,10,1,10,1,6,2, - 2,1,10,1,10,1,12,1,12,2,8,2,12,1,12,1, - 18,1,22,3,8,1,10,1,10,1,8,1,12,1,12,2, - 10,1,16,3,14,1,14,1,10,1,10,1,10,1,114,89, - 1,0,0,99,1,0,0,0,0,0,0,0,0,0,0,0, - 2,0,0,0,4,0,0,0,67,0,0,0,115,50,0,0, - 0,116,0,124,0,131,1,1,0,116,1,131,0,125,1,116, - 2,106,3,160,4,116,5,106,6,124,1,142,0,103,1,161, - 1,1,0,116,2,106,7,160,8,116,9,161,1,1,0,100, - 1,83,0,41,2,122,41,73,110,115,116,97,108,108,32,116, - 104,101,32,112,97,116,104,45,98,97,115,101,100,32,105,109, - 112,111,114,116,32,99,111,109,112,111,110,101,110,116,115,46, - 78,41,10,114,89,1,0,0,114,184,0,0,0,114,1,0, - 0,0,114,51,1,0,0,114,167,0,0,0,114,61,1,0, - 0,114,77,1,0,0,218,9,109,101,116,97,95,112,97,116, - 104,114,186,0,0,0,114,45,1,0,0,41,2,114,88,1, - 0,0,90,17,115,117,112,112,111,114,116,101,100,95,108,111, - 97,100,101,114,115,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,218,8,95,105,110,115,116,97,108,108,96,6, - 0,0,115,8,0,0,0,0,2,8,1,6,1,20,1,114, - 91,1,0,0,41,1,114,60,0,0,0,41,1,78,41,3, - 78,78,78,41,2,114,73,0,0,0,114,73,0,0,0,41, - 1,84,41,1,78,41,1,78,41,63,114,127,0,0,0,114, - 13,0,0,0,90,37,95,67,65,83,69,95,73,78,83,69, - 78,83,73,84,73,86,69,95,80,76,65,84,70,79,82,77, - 83,95,66,89,84,69,83,95,75,69,89,114,12,0,0,0, - 114,14,0,0,0,114,21,0,0,0,114,27,0,0,0,114, - 29,0,0,0,114,38,0,0,0,114,47,0,0,0,114,49, - 0,0,0,114,53,0,0,0,114,54,0,0,0,114,56,0, - 0,0,114,59,0,0,0,114,69,0,0,0,218,4,116,121, - 112,101,218,8,95,95,99,111,100,101,95,95,114,162,0,0, - 0,114,19,0,0,0,114,148,0,0,0,114,18,0,0,0, - 114,24,0,0,0,114,236,0,0,0,114,91,0,0,0,114, - 87,0,0,0,114,101,0,0,0,114,88,0,0,0,90,23, - 68,69,66,85,71,95,66,89,84,69,67,79,68,69,95,83, - 85,70,70,73,88,69,83,90,27,79,80,84,73,77,73,90, - 69,68,95,66,89,84,69,67,79,68,69,95,83,85,70,70, - 73,88,69,83,114,97,0,0,0,114,102,0,0,0,114,108, - 0,0,0,114,112,0,0,0,114,114,0,0,0,114,136,0, - 0,0,114,143,0,0,0,114,152,0,0,0,114,156,0,0, - 0,114,158,0,0,0,114,165,0,0,0,114,170,0,0,0, - 114,171,0,0,0,114,176,0,0,0,218,6,111,98,106,101, - 99,116,114,185,0,0,0,114,190,0,0,0,114,191,0,0, - 0,114,208,0,0,0,114,221,0,0,0,114,239,0,0,0, - 114,9,1,0,0,114,15,1,0,0,114,21,1,0,0,114, - 252,0,0,0,114,22,1,0,0,114,43,1,0,0,114,45, - 1,0,0,114,61,1,0,0,114,82,1,0,0,114,184,0, - 0,0,114,89,1,0,0,114,91,1,0,0,114,5,0,0, - 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,8,60,109,111,100,117,108,101,62,1,0,0,0,115,126, - 0,0,0,4,22,4,1,4,1,2,1,2,255,4,4,8, - 17,8,5,8,5,8,6,8,6,8,12,8,10,8,9,8, - 5,8,7,8,9,10,22,10,127,0,20,16,1,12,2,4, - 1,4,2,6,2,6,2,8,2,16,71,8,40,8,19,8, - 12,8,12,8,28,8,17,8,33,8,28,8,24,10,13,10, - 10,10,11,8,14,6,3,4,1,2,255,12,68,14,64,14, - 29,16,127,0,17,14,72,18,45,18,26,4,3,18,53,14, - 63,14,42,14,127,0,20,14,127,0,22,10,23,8,11,8, - 57, + 4,0,0,0,83,0,0,0,115,22,0,0,0,104,0,124, + 0,93,14,125,1,100,0,124,1,155,0,157,2,146,2,113, + 4,83,0,41,1,114,74,0,0,0,114,5,0,0,0,41, + 2,114,32,0,0,0,218,1,115,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,114,64,1,0,0,52,6,0, + 0,114,56,1,0,0,122,25,95,115,101,116,117,112,46,60, + 
108,111,99,97,108,115,62,46,60,115,101,116,99,111,109,112, + 62,41,3,114,64,0,0,0,114,75,0,0,0,114,160,0, + 0,0,114,192,0,0,0,114,9,0,0,0,122,4,46,112, + 121,119,122,6,95,100,46,112,121,100,84,78,41,18,114,134, + 0,0,0,114,1,0,0,0,114,163,0,0,0,114,22,1, + 0,0,114,125,0,0,0,218,3,97,108,108,90,18,95,98, + 117,105,108,116,105,110,95,102,114,111,109,95,110,97,109,101, + 114,117,0,0,0,114,129,0,0,0,114,36,0,0,0,114, + 186,0,0,0,114,14,0,0,0,114,12,1,0,0,114,167, + 0,0,0,114,76,1,0,0,114,101,0,0,0,114,191,0, + 0,0,114,195,0,0,0,41,10,218,17,95,98,111,111,116, + 115,116,114,97,112,95,109,111,100,117,108,101,90,11,115,101, + 108,102,95,109,111,100,117,108,101,90,10,111,115,95,100,101, + 116,97,105,108,115,90,10,98,117,105,108,116,105,110,95,111, + 115,114,31,0,0,0,114,35,0,0,0,90,9,111,115,95, + 109,111,100,117,108,101,90,13,98,117,105,108,116,105,110,95, + 110,97,109,101,115,90,12,98,117,105,108,116,105,110,95,110, + 97,109,101,90,14,98,117,105,108,116,105,110,95,109,111,100, + 117,108,101,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,218,6,95,115,101,116,117,112,17,6,0,0,115,70, + 0,0,0,0,8,4,1,6,1,6,2,10,3,22,1,12, + 2,22,1,8,1,10,1,10,1,6,2,2,1,10,1,10, + 1,12,1,12,2,8,2,12,1,12,1,18,1,22,3,8, + 1,10,1,10,1,8,1,12,1,12,2,10,1,16,3,14, + 1,14,1,10,1,10,1,10,1,114,82,1,0,0,99,1, + 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,4, + 0,0,0,67,0,0,0,115,50,0,0,0,116,0,124,0, + 131,1,1,0,116,1,131,0,125,1,116,2,106,3,160,4, + 116,5,106,6,124,1,142,0,103,1,161,1,1,0,116,2, + 106,7,160,8,116,9,161,1,1,0,100,1,83,0,41,2, + 122,41,73,110,115,116,97,108,108,32,116,104,101,32,112,97, + 116,104,45,98,97,115,101,100,32,105,109,112,111,114,116,32, + 99,111,109,112,111,110,101,110,116,115,46,78,41,10,114,82, + 1,0,0,114,184,0,0,0,114,1,0,0,0,114,43,1, + 0,0,114,167,0,0,0,114,54,1,0,0,114,70,1,0, + 0,218,9,109,101,116,97,95,112,97,116,104,114,186,0,0, + 0,114,37,1,0,0,41,2,114,81,1,0,0,90,17,115, + 117,112,112,111,114,116,101,100,95,108,111,97,100,101,114,115, + 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, + 8,95,105,110,115,116,97,108,108,74,6,0,0,115,8,0, + 0,0,0,2,8,1,6,1,20,1,114,84,1,0,0,41, + 1,114,60,0,0,0,41,1,78,41,3,78,78,78,41,2, + 114,73,0,0,0,114,73,0,0,0,41,1,84,41,1,78, + 41,1,78,41,63,114,127,0,0,0,114,13,0,0,0,90, + 37,95,67,65,83,69,95,73,78,83,69,78,83,73,84,73, + 86,69,95,80,76,65,84,70,79,82,77,83,95,66,89,84, + 69,83,95,75,69,89,114,12,0,0,0,114,14,0,0,0, + 114,21,0,0,0,114,27,0,0,0,114,29,0,0,0,114, + 38,0,0,0,114,47,0,0,0,114,49,0,0,0,114,53, + 0,0,0,114,54,0,0,0,114,56,0,0,0,114,59,0, + 0,0,114,69,0,0,0,218,4,116,121,112,101,218,8,95, + 95,99,111,100,101,95,95,114,162,0,0,0,114,19,0,0, + 0,114,148,0,0,0,114,18,0,0,0,114,24,0,0,0, + 114,236,0,0,0,114,91,0,0,0,114,87,0,0,0,114, + 101,0,0,0,114,88,0,0,0,90,23,68,69,66,85,71, + 95,66,89,84,69,67,79,68,69,95,83,85,70,70,73,88, + 69,83,90,27,79,80,84,73,77,73,90,69,68,95,66,89, + 84,69,67,79,68,69,95,83,85,70,70,73,88,69,83,114, + 97,0,0,0,114,102,0,0,0,114,108,0,0,0,114,112, + 0,0,0,114,114,0,0,0,114,136,0,0,0,114,143,0, + 0,0,114,152,0,0,0,114,156,0,0,0,114,158,0,0, + 0,114,165,0,0,0,114,170,0,0,0,114,171,0,0,0, + 114,176,0,0,0,218,6,111,98,106,101,99,116,114,185,0, + 0,0,114,190,0,0,0,114,191,0,0,0,114,208,0,0, + 0,114,221,0,0,0,114,239,0,0,0,114,255,0,0,0, + 114,5,1,0,0,114,12,1,0,0,114,252,0,0,0,114, + 13,1,0,0,114,35,1,0,0,114,37,1,0,0,114,54, + 1,0,0,114,75,1,0,0,114,184,0,0,0,114,82,1, + 0,0,114,84,1,0,0,114,5,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,8,60,109,111, + 100,117,108,101,62,1,0,0,0,115,126,0,0,0,4,22, + 4,1,4,1,2,1,2,255,4,4,8,17,8,5,8,5, + 
8,6,8,6,8,12,8,10,8,9,8,5,8,7,8,9, + 10,22,10,127,0,20,16,1,12,2,4,1,4,2,6,2, + 6,2,8,2,16,71,8,40,8,19,8,12,8,12,8,28, + 8,17,8,33,8,28,8,24,10,13,10,10,10,11,8,14, + 6,3,4,1,2,255,12,68,14,64,14,29,16,127,0,17, + 14,50,18,45,18,26,4,3,18,53,14,63,14,42,14,127, + 0,20,14,127,0,22,10,23,8,11,8,57, }; diff --git a/Python/importlib_zipimport.h b/Python/importlib_zipimport.h index 373b1366bdea3..be7d24fe1df25 100644 --- a/Python/importlib_zipimport.h +++ b/Python/importlib_zipimport.h @@ -1,7 +1,7 @@ /* Auto-generated by Programs/_freeze_importlib.c */ const unsigned char _Py_M__zipimport[] = { 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,4,0,0,0,64,0,0,0,115,82,1,0,0,100,0, + 0,4,0,0,0,64,0,0,0,115,68,1,0,0,100,0, 90,0,100,1,100,2,108,1,90,2,100,1,100,3,108,1, 109,3,90,3,109,4,90,4,1,0,100,1,100,2,108,5, 90,6,100,1,100,2,108,7,90,7,100,1,100,2,108,8, @@ -21,1059 +21,941 @@ const unsigned char _Py_M__zipimport[] = { 132,0,90,35,101,19,101,35,106,36,131,1,90,37,100,35, 100,36,132,0,90,38,100,37,100,38,132,0,90,39,100,39, 100,40,132,0,90,40,100,41,100,42,132,0,90,41,100,43, - 100,44,132,0,90,42,100,45,100,46,132,0,90,43,71,0, - 100,47,100,48,132,0,100,48,131,2,90,44,100,2,83,0, - 41,49,97,80,2,0,0,122,105,112,105,109,112,111,114,116, - 32,112,114,111,118,105,100,101,115,32,115,117,112,112,111,114, - 116,32,102,111,114,32,105,109,112,111,114,116,105,110,103,32, - 80,121,116,104,111,110,32,109,111,100,117,108,101,115,32,102, - 114,111,109,32,90,105,112,32,97,114,99,104,105,118,101,115, - 46,10,10,84,104,105,115,32,109,111,100,117,108,101,32,101, - 120,112,111,114,116,115,32,116,104,114,101,101,32,111,98,106, - 101,99,116,115,58,10,45,32,122,105,112,105,109,112,111,114, - 116,101,114,58,32,97,32,99,108,97,115,115,59,32,105,116, - 115,32,99,111,110,115,116,114,117,99,116,111,114,32,116,97, - 107,101,115,32,97,32,112,97,116,104,32,116,111,32,97,32, - 90,105,112,32,97,114,99,104,105,118,101,46,10,45,32,90, - 105,112,73,109,112,111,114,116,69,114,114,111,114,58,32,101, - 120,99,101,112,116,105,111,110,32,114,97,105,115,101,100,32, - 98,121,32,122,105,112,105,109,112,111,114,116,101,114,32,111, - 98,106,101,99,116,115,46,32,73,116,39,115,32,97,10,32, - 32,115,117,98,99,108,97,115,115,32,111,102,32,73,109,112, - 111,114,116,69,114,114,111,114,44,32,115,111,32,105,116,32, - 99,97,110,32,98,101,32,99,97,117,103,104,116,32,97,115, - 32,73,109,112,111,114,116,69,114,114,111,114,44,32,116,111, - 111,46,10,45,32,95,122,105,112,95,100,105,114,101,99,116, - 111,114,121,95,99,97,99,104,101,58,32,97,32,100,105,99, - 116,44,32,109,97,112,112,105,110,103,32,97,114,99,104,105, - 118,101,32,112,97,116,104,115,32,116,111,32,122,105,112,32, - 100,105,114,101,99,116,111,114,121,10,32,32,105,110,102,111, - 32,100,105,99,116,115,44,32,97,115,32,117,115,101,100,32, - 105,110,32,122,105,112,105,109,112,111,114,116,101,114,46,95, - 102,105,108,101,115,46,10,10,73,116,32,105,115,32,117,115, - 117,97,108,108,121,32,110,111,116,32,110,101,101,100,101,100, - 32,116,111,32,117,115,101,32,116,104,101,32,122,105,112,105, - 109,112,111,114,116,32,109,111,100,117,108,101,32,101,120,112, - 108,105,99,105,116,108,121,59,32,105,116,32,105,115,10,117, - 115,101,100,32,98,121,32,116,104,101,32,98,117,105,108,116, - 105,110,32,105,109,112,111,114,116,32,109,101,99,104,97,110, - 105,115,109,32,102,111,114,32,115,121,115,46,112,97,116,104, - 32,105,116,101,109,115,32,116,104,97,116,32,97,114,101,32, - 112,97,116,104,115,10,116,111,32,90,105,112,32,97,114,99, - 104,105,118,101,115,46,10,233,0,0,0,0,78,41,2,218, - 
14,95,117,110,112,97,99,107,95,117,105,110,116,49,54,218, - 14,95,117,110,112,97,99,107,95,117,105,110,116,51,50,218, - 14,90,105,112,73,109,112,111,114,116,69,114,114,111,114,218, - 11,122,105,112,105,109,112,111,114,116,101,114,233,1,0,0, - 0,99,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,1,0,0,0,64,0,0,0,115,12,0,0,0,101, - 0,90,1,100,0,90,2,100,1,83,0,41,2,114,3,0, - 0,0,78,41,3,218,8,95,95,110,97,109,101,95,95,218, - 10,95,95,109,111,100,117,108,101,95,95,218,12,95,95,113, - 117,97,108,110,97,109,101,95,95,169,0,114,9,0,0,0, - 114,9,0,0,0,250,18,60,102,114,111,122,101,110,32,122, - 105,112,105,109,112,111,114,116,62,114,3,0,0,0,33,0, - 0,0,115,2,0,0,0,8,1,233,22,0,0,0,115,4, - 0,0,0,80,75,5,6,105,255,255,0,0,99,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,3,0,0, - 0,64,0,0,0,115,108,0,0,0,101,0,90,1,100,0, - 90,2,100,1,90,3,100,2,100,3,132,0,90,4,100,25, - 100,5,100,6,132,1,90,5,100,26,100,7,100,8,132,1, - 90,6,100,9,100,10,132,0,90,7,100,11,100,12,132,0, - 90,8,100,13,100,14,132,0,90,9,100,15,100,16,132,0, - 90,10,100,17,100,18,132,0,90,11,100,19,100,20,132,0, - 90,12,100,21,100,22,132,0,90,13,100,23,100,24,132,0, - 90,14,100,4,83,0,41,27,114,4,0,0,0,97,255,1, - 0,0,122,105,112,105,109,112,111,114,116,101,114,40,97,114, - 99,104,105,118,101,112,97,116,104,41,32,45,62,32,122,105, - 112,105,109,112,111,114,116,101,114,32,111,98,106,101,99,116, - 10,10,32,32,32,32,67,114,101,97,116,101,32,97,32,110, - 101,119,32,122,105,112,105,109,112,111,114,116,101,114,32,105, - 110,115,116,97,110,99,101,46,32,39,97,114,99,104,105,118, - 101,112,97,116,104,39,32,109,117,115,116,32,98,101,32,97, - 32,112,97,116,104,32,116,111,10,32,32,32,32,97,32,122, - 105,112,102,105,108,101,44,32,111,114,32,116,111,32,97,32, - 115,112,101,99,105,102,105,99,32,112,97,116,104,32,105,110, - 115,105,100,101,32,97,32,122,105,112,102,105,108,101,46,32, - 70,111,114,32,101,120,97,109,112,108,101,44,32,105,116,32, - 99,97,110,32,98,101,10,32,32,32,32,39,47,116,109,112, - 47,109,121,105,109,112,111,114,116,46,122,105,112,39,44,32, - 111,114,32,39,47,116,109,112,47,109,121,105,109,112,111,114, - 116,46,122,105,112,47,109,121,100,105,114,101,99,116,111,114, - 121,39,44,32,105,102,32,109,121,100,105,114,101,99,116,111, - 114,121,32,105,115,32,97,10,32,32,32,32,118,97,108,105, - 100,32,100,105,114,101,99,116,111,114,121,32,105,110,115,105, - 100,101,32,116,104,101,32,97,114,99,104,105,118,101,46,10, - 10,32,32,32,32,39,90,105,112,73,109,112,111,114,116,69, - 114,114,111,114,32,105,115,32,114,97,105,115,101,100,32,105, - 102,32,39,97,114,99,104,105,118,101,112,97,116,104,39,32, - 100,111,101,115,110,39,116,32,112,111,105,110,116,32,116,111, - 32,97,32,118,97,108,105,100,32,90,105,112,10,32,32,32, - 32,97,114,99,104,105,118,101,46,10,10,32,32,32,32,84, - 104,101,32,39,97,114,99,104,105,118,101,39,32,97,116,116, - 114,105,98,117,116,101,32,111,102,32,122,105,112,105,109,112, - 111,114,116,101,114,32,111,98,106,101,99,116,115,32,99,111, - 110,116,97,105,110,115,32,116,104,101,32,110,97,109,101,32, - 111,102,32,116,104,101,10,32,32,32,32,122,105,112,102,105, - 108,101,32,116,97,114,103,101,116,101,100,46,10,32,32,32, - 32,99,2,0,0,0,0,0,0,0,0,0,0,0,8,0, - 0,0,9,0,0,0,67,0,0,0,115,32,1,0,0,116, - 0,124,1,116,1,131,2,115,28,100,1,100,0,108,2,125, - 2,124,2,160,3,124,1,161,1,125,1,124,1,115,44,116, - 4,100,2,124,1,100,3,141,2,130,1,116,5,114,60,124, - 1,160,6,116,5,116,7,161,2,125,1,103,0,125,3,122, - 14,116,8,160,9,124,1,161,1,125,4,87,0,110,70,4, - 0,116,10,116,11,102,2,121,148,1,0,1,0,1,0,116, - 8,160,12,124,1,161,1,92,2,125,5,125,6,124,5,124, - 
1,107,2,114,130,116,4,100,4,124,1,100,3,141,2,130, - 1,124,5,125,1,124,3,160,13,124,6,161,1,1,0,89, - 0,113,64,48,0,124,4,106,14,100,5,64,0,100,6,107, - 3,114,180,116,4,100,4,124,1,100,3,141,2,130,1,113, - 180,113,64,122,12,116,15,124,1,25,0,125,7,87,0,110, - 34,4,0,116,16,121,226,1,0,1,0,1,0,116,17,124, - 1,131,1,125,7,124,7,116,15,124,1,60,0,89,0,110, - 2,48,0,124,7,124,0,95,18,124,1,124,0,95,19,116, - 8,106,20,124,3,100,0,100,0,100,7,133,3,25,0,142, - 0,124,0,95,21,124,0,106,21,144,1,114,28,124,0,4, - 0,106,21,116,7,55,0,2,0,95,21,100,0,83,0,41, - 8,78,114,0,0,0,0,122,21,97,114,99,104,105,118,101, - 32,112,97,116,104,32,105,115,32,101,109,112,116,121,169,1, - 218,4,112,97,116,104,122,14,110,111,116,32,97,32,90,105, - 112,32,102,105,108,101,105,0,240,0,0,105,0,128,0,0, - 233,255,255,255,255,41,22,218,10,105,115,105,110,115,116,97, - 110,99,101,218,3,115,116,114,218,2,111,115,90,8,102,115, - 100,101,99,111,100,101,114,3,0,0,0,218,12,97,108,116, - 95,112,97,116,104,95,115,101,112,218,7,114,101,112,108,97, - 99,101,218,8,112,97,116,104,95,115,101,112,218,19,95,98, - 111,111,116,115,116,114,97,112,95,101,120,116,101,114,110,97, - 108,90,10,95,112,97,116,104,95,115,116,97,116,218,7,79, - 83,69,114,114,111,114,218,10,86,97,108,117,101,69,114,114, - 111,114,90,11,95,112,97,116,104,95,115,112,108,105,116,218, - 6,97,112,112,101,110,100,90,7,115,116,95,109,111,100,101, - 218,20,95,122,105,112,95,100,105,114,101,99,116,111,114,121, - 95,99,97,99,104,101,218,8,75,101,121,69,114,114,111,114, - 218,15,95,114,101,97,100,95,100,105,114,101,99,116,111,114, - 121,218,6,95,102,105,108,101,115,218,7,97,114,99,104,105, - 118,101,218,10,95,112,97,116,104,95,106,111,105,110,218,6, - 112,114,101,102,105,120,41,8,218,4,115,101,108,102,114,13, - 0,0,0,114,17,0,0,0,114,31,0,0,0,90,2,115, - 116,90,7,100,105,114,110,97,109,101,90,8,98,97,115,101, - 110,97,109,101,218,5,102,105,108,101,115,114,9,0,0,0, - 114,9,0,0,0,114,10,0,0,0,218,8,95,95,105,110, - 105,116,95,95,63,0,0,0,115,58,0,0,0,0,1,10, - 1,8,1,10,1,4,1,12,1,4,1,12,2,4,2,2, - 1,14,1,16,3,14,1,8,1,12,1,4,1,16,3,14, - 2,12,1,4,2,2,1,12,1,12,1,8,1,14,1,6, - 1,6,2,22,1,8,1,122,20,122,105,112,105,109,112,111, - 114,116,101,114,46,95,95,105,110,105,116,95,95,78,99,3, - 0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,4, - 0,0,0,67,0,0,0,115,78,0,0,0,116,0,124,0, - 124,1,131,2,125,3,124,3,100,1,117,1,114,26,124,0, - 103,0,102,2,83,0,116,1,124,0,124,1,131,2,125,4, - 116,2,124,0,124,4,131,2,114,70,100,1,124,0,106,3, - 155,0,116,4,155,0,124,4,155,0,157,3,103,1,102,2, - 83,0,100,1,103,0,102,2,83,0,41,2,97,239,1,0, - 0,102,105,110,100,95,108,111,97,100,101,114,40,102,117,108, - 108,110,97,109,101,44,32,112,97,116,104,61,78,111,110,101, - 41,32,45,62,32,115,101,108,102,44,32,115,116,114,32,111, - 114,32,78,111,110,101,46,10,10,32,32,32,32,32,32,32, - 32,83,101,97,114,99,104,32,102,111,114,32,97,32,109,111, - 100,117,108,101,32,115,112,101,99,105,102,105,101,100,32,98, - 121,32,39,102,117,108,108,110,97,109,101,39,46,32,39,102, - 117,108,108,110,97,109,101,39,32,109,117,115,116,32,98,101, - 32,116,104,101,10,32,32,32,32,32,32,32,32,102,117,108, - 108,121,32,113,117,97,108,105,102,105,101,100,32,40,100,111, - 116,116,101,100,41,32,109,111,100,117,108,101,32,110,97,109, - 101,46,32,73,116,32,114,101,116,117,114,110,115,32,116,104, - 101,32,122,105,112,105,109,112,111,114,116,101,114,10,32,32, - 32,32,32,32,32,32,105,110,115,116,97,110,99,101,32,105, - 116,115,101,108,102,32,105,102,32,116,104,101,32,109,111,100, - 117,108,101,32,119,97,115,32,102,111,117,110,100,44,32,97, - 
32,115,116,114,105,110,103,32,99,111,110,116,97,105,110,105, - 110,103,32,116,104,101,10,32,32,32,32,32,32,32,32,102, - 117,108,108,32,112,97,116,104,32,110,97,109,101,32,105,102, - 32,105,116,39,115,32,112,111,115,115,105,98,108,121,32,97, - 32,112,111,114,116,105,111,110,32,111,102,32,97,32,110,97, - 109,101,115,112,97,99,101,32,112,97,99,107,97,103,101,44, - 10,32,32,32,32,32,32,32,32,111,114,32,78,111,110,101, - 32,111,116,104,101,114,119,105,115,101,46,32,84,104,101,32, - 111,112,116,105,111,110,97,108,32,39,112,97,116,104,39,32, - 97,114,103,117,109,101,110,116,32,105,115,32,105,103,110,111, - 114,101,100,32,45,45,32,105,116,39,115,10,32,32,32,32, - 32,32,32,32,116,104,101,114,101,32,102,111,114,32,99,111, - 109,112,97,116,105,98,105,108,105,116,121,32,119,105,116,104, - 32,116,104,101,32,105,109,112,111,114,116,101,114,32,112,114, - 111,116,111,99,111,108,46,10,32,32,32,32,32,32,32,32, - 78,41,5,218,16,95,103,101,116,95,109,111,100,117,108,101, - 95,105,110,102,111,218,16,95,103,101,116,95,109,111,100,117, - 108,101,95,112,97,116,104,218,7,95,105,115,95,100,105,114, - 114,29,0,0,0,114,20,0,0,0,41,5,114,32,0,0, - 0,218,8,102,117,108,108,110,97,109,101,114,13,0,0,0, - 218,2,109,105,218,7,109,111,100,112,97,116,104,114,9,0, - 0,0,114,9,0,0,0,114,10,0,0,0,218,11,102,105, - 110,100,95,108,111,97,100,101,114,109,0,0,0,115,14,0, - 0,0,0,10,10,1,8,2,8,7,10,1,10,4,24,2, - 122,23,122,105,112,105,109,112,111,114,116,101,114,46,102,105, - 110,100,95,108,111,97,100,101,114,99,3,0,0,0,0,0, - 0,0,0,0,0,0,3,0,0,0,4,0,0,0,67,0, - 0,0,115,16,0,0,0,124,0,160,0,124,1,124,2,161, - 2,100,1,25,0,83,0,41,2,97,139,1,0,0,102,105, - 110,100,95,109,111,100,117,108,101,40,102,117,108,108,110,97, - 109,101,44,32,112,97,116,104,61,78,111,110,101,41,32,45, - 62,32,115,101,108,102,32,111,114,32,78,111,110,101,46,10, - 10,32,32,32,32,32,32,32,32,83,101,97,114,99,104,32, - 102,111,114,32,97,32,109,111,100,117,108,101,32,115,112,101, - 99,105,102,105,101,100,32,98,121,32,39,102,117,108,108,110, - 97,109,101,39,46,32,39,102,117,108,108,110,97,109,101,39, - 32,109,117,115,116,32,98,101,32,116,104,101,10,32,32,32, - 32,32,32,32,32,102,117,108,108,121,32,113,117,97,108,105, - 102,105,101,100,32,40,100,111,116,116,101,100,41,32,109,111, - 100,117,108,101,32,110,97,109,101,46,32,73,116,32,114,101, - 116,117,114,110,115,32,116,104,101,32,122,105,112,105,109,112, - 111,114,116,101,114,10,32,32,32,32,32,32,32,32,105,110, - 115,116,97,110,99,101,32,105,116,115,101,108,102,32,105,102, - 32,116,104,101,32,109,111,100,117,108,101,32,119,97,115,32, - 102,111,117,110,100,44,32,111,114,32,78,111,110,101,32,105, - 102,32,105,116,32,119,97,115,110,39,116,46,10,32,32,32, - 32,32,32,32,32,84,104,101,32,111,112,116,105,111,110,97, - 108,32,39,112,97,116,104,39,32,97,114,103,117,109,101,110, - 116,32,105,115,32,105,103,110,111,114,101,100,32,45,45,32, - 105,116,39,115,32,116,104,101,114,101,32,102,111,114,32,99, - 111,109,112,97,116,105,98,105,108,105,116,121,10,32,32,32, - 32,32,32,32,32,119,105,116,104,32,116,104,101,32,105,109, - 112,111,114,116,101,114,32,112,114,111,116,111,99,111,108,46, - 10,32,32,32,32,32,32,32,32,114,0,0,0,0,41,1, - 114,41,0,0,0,41,3,114,32,0,0,0,114,38,0,0, - 0,114,13,0,0,0,114,9,0,0,0,114,9,0,0,0, - 114,10,0,0,0,218,11,102,105,110,100,95,109,111,100,117, - 108,101,141,0,0,0,115,2,0,0,0,0,9,122,23,122, - 105,112,105,109,112,111,114,116,101,114,46,102,105,110,100,95, - 109,111,100,117,108,101,99,2,0,0,0,0,0,0,0,0, - 0,0,0,5,0,0,0,3,0,0,0,67,0,0,0,115, - 20,0,0,0,116,0,124,0,124,1,131,2,92,3,125,2, - 
125,3,125,4,124,2,83,0,41,1,122,163,103,101,116,95, - 99,111,100,101,40,102,117,108,108,110,97,109,101,41,32,45, - 62,32,99,111,100,101,32,111,98,106,101,99,116,46,10,10, - 32,32,32,32,32,32,32,32,82,101,116,117,114,110,32,116, - 104,101,32,99,111,100,101,32,111,98,106,101,99,116,32,102, - 111,114,32,116,104,101,32,115,112,101,99,105,102,105,101,100, - 32,109,111,100,117,108,101,46,32,82,97,105,115,101,32,90, - 105,112,73,109,112,111,114,116,69,114,114,111,114,10,32,32, - 32,32,32,32,32,32,105,102,32,116,104,101,32,109,111,100, - 117,108,101,32,99,111,117,108,100,110,39,116,32,98,101,32, - 102,111,117,110,100,46,10,32,32,32,32,32,32,32,32,169, - 1,218,16,95,103,101,116,95,109,111,100,117,108,101,95,99, - 111,100,101,169,5,114,32,0,0,0,114,38,0,0,0,218, - 4,99,111,100,101,218,9,105,115,112,97,99,107,97,103,101, - 114,40,0,0,0,114,9,0,0,0,114,9,0,0,0,114, - 10,0,0,0,218,8,103,101,116,95,99,111,100,101,153,0, - 0,0,115,4,0,0,0,0,6,16,1,122,20,122,105,112, - 105,109,112,111,114,116,101,114,46,103,101,116,95,99,111,100, - 101,99,2,0,0,0,0,0,0,0,0,0,0,0,4,0, - 0,0,8,0,0,0,67,0,0,0,115,116,0,0,0,116, - 0,114,16,124,1,160,1,116,0,116,2,161,2,125,1,124, - 1,125,2,124,1,160,3,124,0,106,4,116,2,23,0,161, - 1,114,58,124,1,116,5,124,0,106,4,116,2,23,0,131, - 1,100,1,133,2,25,0,125,2,122,14,124,0,106,6,124, - 2,25,0,125,3,87,0,110,30,4,0,116,7,121,102,1, - 0,1,0,1,0,116,8,100,2,100,3,124,2,131,3,130, - 1,89,0,110,2,48,0,116,9,124,0,106,4,124,3,131, - 2,83,0,41,4,122,154,103,101,116,95,100,97,116,97,40, - 112,97,116,104,110,97,109,101,41,32,45,62,32,115,116,114, - 105,110,103,32,119,105,116,104,32,102,105,108,101,32,100,97, - 116,97,46,10,10,32,32,32,32,32,32,32,32,82,101,116, - 117,114,110,32,116,104,101,32,100,97,116,97,32,97,115,115, - 111,99,105,97,116,101,100,32,119,105,116,104,32,39,112,97, - 116,104,110,97,109,101,39,46,32,82,97,105,115,101,32,79, - 83,69,114,114,111,114,32,105,102,10,32,32,32,32,32,32, - 32,32,116,104,101,32,102,105,108,101,32,119,97,115,110,39, - 116,32,102,111,117,110,100,46,10,32,32,32,32,32,32,32, - 32,78,114,0,0,0,0,218,0,41,10,114,18,0,0,0, - 114,19,0,0,0,114,20,0,0,0,218,10,115,116,97,114, - 116,115,119,105,116,104,114,29,0,0,0,218,3,108,101,110, - 114,28,0,0,0,114,26,0,0,0,114,22,0,0,0,218, - 9,95,103,101,116,95,100,97,116,97,41,4,114,32,0,0, - 0,218,8,112,97,116,104,110,97,109,101,90,3,107,101,121, - 218,9,116,111,99,95,101,110,116,114,121,114,9,0,0,0, - 114,9,0,0,0,114,10,0,0,0,218,8,103,101,116,95, - 100,97,116,97,163,0,0,0,115,20,0,0,0,0,6,4, - 1,12,2,4,1,16,1,22,2,2,1,14,1,12,1,18, - 1,122,20,122,105,112,105,109,112,111,114,116,101,114,46,103, - 101,116,95,100,97,116,97,99,2,0,0,0,0,0,0,0, - 0,0,0,0,5,0,0,0,3,0,0,0,67,0,0,0, - 115,20,0,0,0,116,0,124,0,124,1,131,2,92,3,125, - 2,125,3,125,4,124,4,83,0,41,1,122,106,103,101,116, - 95,102,105,108,101,110,97,109,101,40,102,117,108,108,110,97, - 109,101,41,32,45,62,32,102,105,108,101,110,97,109,101,32, - 115,116,114,105,110,103,46,10,10,32,32,32,32,32,32,32, - 32,82,101,116,117,114,110,32,116,104,101,32,102,105,108,101, - 110,97,109,101,32,102,111,114,32,116,104,101,32,115,112,101, - 99,105,102,105,101,100,32,109,111,100,117,108,101,46,10,32, - 32,32,32,32,32,32,32,114,43,0,0,0,114,45,0,0, - 0,114,9,0,0,0,114,9,0,0,0,114,10,0,0,0, - 218,12,103,101,116,95,102,105,108,101,110,97,109,101,184,0, - 0,0,115,4,0,0,0,0,7,16,1,122,24,122,105,112, - 105,109,112,111,114,116,101,114,46,103,101,116,95,102,105,108, - 101,110,97,109,101,99,2,0,0,0,0,0,0,0,0,0, - 0,0,6,0,0,0,8,0,0,0,67,0,0,0,115,126, - 
0,0,0,116,0,124,0,124,1,131,2,125,2,124,2,100, - 1,117,0,114,36,116,1,100,2,124,1,155,2,157,2,124, - 1,100,3,141,2,130,1,116,2,124,0,124,1,131,2,125, - 3,124,2,114,64,116,3,160,4,124,3,100,4,161,2,125, - 4,110,10,124,3,155,0,100,5,157,2,125,4,122,14,124, - 0,106,5,124,4,25,0,125,5,87,0,110,20,4,0,116, - 6,121,108,1,0,1,0,1,0,89,0,100,1,83,0,48, - 0,116,7,124,0,106,8,124,5,131,2,160,9,161,0,83, - 0,41,6,122,253,103,101,116,95,115,111,117,114,99,101,40, - 102,117,108,108,110,97,109,101,41,32,45,62,32,115,111,117, - 114,99,101,32,115,116,114,105,110,103,46,10,10,32,32,32, - 32,32,32,32,32,82,101,116,117,114,110,32,116,104,101,32, - 115,111,117,114,99,101,32,99,111,100,101,32,102,111,114,32, - 116,104,101,32,115,112,101,99,105,102,105,101,100,32,109,111, - 100,117,108,101,46,32,82,97,105,115,101,32,90,105,112,73, - 109,112,111,114,116,69,114,114,111,114,10,32,32,32,32,32, - 32,32,32,105,102,32,116,104,101,32,109,111,100,117,108,101, - 32,99,111,117,108,100,110,39,116,32,98,101,32,102,111,117, - 110,100,44,32,114,101,116,117,114,110,32,78,111,110,101,32, - 105,102,32,116,104,101,32,97,114,99,104,105,118,101,32,100, - 111,101,115,10,32,32,32,32,32,32,32,32,99,111,110,116, - 97,105,110,32,116,104,101,32,109,111,100,117,108,101,44,32, - 98,117,116,32,104,97,115,32,110,111,32,115,111,117,114,99, - 101,32,102,111,114,32,105,116,46,10,32,32,32,32,32,32, - 32,32,78,250,18,99,97,110,39,116,32,102,105,110,100,32, - 109,111,100,117,108,101,32,169,1,218,4,110,97,109,101,250, - 11,95,95,105,110,105,116,95,95,46,112,121,250,3,46,112, - 121,41,10,114,35,0,0,0,114,3,0,0,0,114,36,0, - 0,0,114,21,0,0,0,114,30,0,0,0,114,28,0,0, - 0,114,26,0,0,0,114,52,0,0,0,114,29,0,0,0, - 218,6,100,101,99,111,100,101,41,6,114,32,0,0,0,114, - 38,0,0,0,114,39,0,0,0,114,13,0,0,0,218,8, - 102,117,108,108,112,97,116,104,114,54,0,0,0,114,9,0, - 0,0,114,9,0,0,0,114,10,0,0,0,218,10,103,101, - 116,95,115,111,117,114,99,101,195,0,0,0,115,24,0,0, - 0,0,7,10,1,8,1,18,2,10,1,4,1,14,2,10, - 2,2,1,14,1,12,2,8,1,122,22,122,105,112,105,109, - 112,111,114,116,101,114,46,103,101,116,95,115,111,117,114,99, - 101,99,2,0,0,0,0,0,0,0,0,0,0,0,3,0, - 0,0,4,0,0,0,67,0,0,0,115,40,0,0,0,116, - 0,124,0,124,1,131,2,125,2,124,2,100,1,117,0,114, - 36,116,1,100,2,124,1,155,2,157,2,124,1,100,3,141, - 2,130,1,124,2,83,0,41,4,122,171,105,115,95,112,97, - 99,107,97,103,101,40,102,117,108,108,110,97,109,101,41,32, - 45,62,32,98,111,111,108,46,10,10,32,32,32,32,32,32, - 32,32,82,101,116,117,114,110,32,84,114,117,101,32,105,102, - 32,116,104,101,32,109,111,100,117,108,101,32,115,112,101,99, - 105,102,105,101,100,32,98,121,32,102,117,108,108,110,97,109, - 101,32,105,115,32,97,32,112,97,99,107,97,103,101,46,10, - 32,32,32,32,32,32,32,32,82,97,105,115,101,32,90,105, - 112,73,109,112,111,114,116,69,114,114,111,114,32,105,102,32, - 116,104,101,32,109,111,100,117,108,101,32,99,111,117,108,100, - 110,39,116,32,98,101,32,102,111,117,110,100,46,10,32,32, - 32,32,32,32,32,32,78,114,57,0,0,0,114,58,0,0, - 0,41,2,114,35,0,0,0,114,3,0,0,0,41,3,114, - 32,0,0,0,114,38,0,0,0,114,39,0,0,0,114,9, - 0,0,0,114,9,0,0,0,114,10,0,0,0,218,10,105, - 115,95,112,97,99,107,97,103,101,221,0,0,0,115,8,0, - 0,0,0,6,10,1,8,1,18,1,122,22,122,105,112,105, - 109,112,111,114,116,101,114,46,105,115,95,112,97,99,107,97, - 103,101,99,2,0,0,0,0,0,0,0,0,0,0,0,8, - 0,0,0,8,0,0,0,67,0,0,0,115,246,0,0,0, - 116,0,124,0,124,1,131,2,92,3,125,2,125,3,125,4, - 116,1,106,2,160,3,124,1,161,1,125,5,124,5,100,1, - 117,0,115,46,116,4,124,5,116,5,131,2,115,64,116,5, - 124,1,131,1,125,5,124,5,116,1,106,2,124,1,60,0, - 
124,0,124,5,95,6,122,84,124,3,114,108,116,7,124,0, - 124,1,131,2,125,6,116,8,160,9,124,0,106,10,124,6, - 161,2,125,7,124,7,103,1,124,5,95,11,116,12,124,5, - 100,2,131,2,115,124,116,13,124,5,95,13,116,8,160,14, - 124,5,106,15,124,1,124,4,161,3,1,0,116,16,124,2, - 124,5,106,15,131,2,1,0,87,0,110,22,1,0,1,0, - 1,0,116,1,106,2,124,1,61,0,130,0,89,0,110,2, - 48,0,122,14,116,1,106,2,124,1,25,0,125,5,87,0, - 110,34,4,0,116,17,121,226,1,0,1,0,1,0,116,18, - 100,3,124,1,155,2,100,4,157,3,131,1,130,1,89,0, - 110,2,48,0,116,19,160,20,100,5,124,1,124,4,161,3, - 1,0,124,5,83,0,41,6,122,245,108,111,97,100,95,109, - 111,100,117,108,101,40,102,117,108,108,110,97,109,101,41,32, - 45,62,32,109,111,100,117,108,101,46,10,10,32,32,32,32, - 32,32,32,32,76,111,97,100,32,116,104,101,32,109,111,100, - 117,108,101,32,115,112,101,99,105,102,105,101,100,32,98,121, - 32,39,102,117,108,108,110,97,109,101,39,46,32,39,102,117, - 108,108,110,97,109,101,39,32,109,117,115,116,32,98,101,32, - 116,104,101,10,32,32,32,32,32,32,32,32,102,117,108,108, - 121,32,113,117,97,108,105,102,105,101,100,32,40,100,111,116, - 116,101,100,41,32,109,111,100,117,108,101,32,110,97,109,101, - 46,32,73,116,32,114,101,116,117,114,110,115,32,116,104,101, - 32,105,109,112,111,114,116,101,100,10,32,32,32,32,32,32, - 32,32,109,111,100,117,108,101,44,32,111,114,32,114,97,105, - 115,101,115,32,90,105,112,73,109,112,111,114,116,69,114,114, - 111,114,32,105,102,32,105,116,32,119,97,115,110,39,116,32, - 102,111,117,110,100,46,10,32,32,32,32,32,32,32,32,78, - 218,12,95,95,98,117,105,108,116,105,110,115,95,95,122,14, - 76,111,97,100,101,100,32,109,111,100,117,108,101,32,122,25, - 32,110,111,116,32,102,111,117,110,100,32,105,110,32,115,121, - 115,46,109,111,100,117,108,101,115,122,30,105,109,112,111,114, - 116,32,123,125,32,35,32,108,111,97,100,101,100,32,102,114, - 111,109,32,90,105,112,32,123,125,41,21,114,44,0,0,0, - 218,3,115,121,115,218,7,109,111,100,117,108,101,115,218,3, - 103,101,116,114,15,0,0,0,218,12,95,109,111,100,117,108, - 101,95,116,121,112,101,218,10,95,95,108,111,97,100,101,114, - 95,95,114,36,0,0,0,114,21,0,0,0,114,30,0,0, - 0,114,29,0,0,0,90,8,95,95,112,97,116,104,95,95, - 218,7,104,97,115,97,116,116,114,114,66,0,0,0,90,14, - 95,102,105,120,95,117,112,95,109,111,100,117,108,101,218,8, - 95,95,100,105,99,116,95,95,218,4,101,120,101,99,114,26, - 0,0,0,218,11,73,109,112,111,114,116,69,114,114,111,114, - 218,10,95,98,111,111,116,115,116,114,97,112,218,16,95,118, - 101,114,98,111,115,101,95,109,101,115,115,97,103,101,41,8, - 114,32,0,0,0,114,38,0,0,0,114,46,0,0,0,114, - 47,0,0,0,114,40,0,0,0,90,3,109,111,100,114,13, - 0,0,0,114,63,0,0,0,114,9,0,0,0,114,9,0, - 0,0,114,10,0,0,0,218,11,108,111,97,100,95,109,111, - 100,117,108,101,234,0,0,0,115,48,0,0,0,0,7,16, - 1,12,1,18,1,8,1,10,1,6,2,2,1,4,3,10, - 1,14,1,8,2,10,1,6,1,16,1,16,1,6,1,8, - 1,8,2,2,1,14,1,12,1,22,1,14,1,122,23,122, - 105,112,105,109,112,111,114,116,101,114,46,108,111,97,100,95, - 109,111,100,117,108,101,99,2,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,8,0,0,0,67,0,0,0,115, - 86,0,0,0,122,20,124,0,160,0,124,1,161,1,115,18, - 87,0,100,1,83,0,87,0,110,20,4,0,116,1,121,40, - 1,0,1,0,1,0,89,0,100,1,83,0,48,0,116,2, - 106,3,115,76,100,2,100,3,108,4,109,5,125,2,1,0, - 124,2,160,6,116,2,161,1,1,0,100,4,116,2,95,3, - 116,2,124,0,124,1,131,2,83,0,41,5,122,204,82,101, - 116,117,114,110,32,116,104,101,32,82,101,115,111,117,114,99, - 101,82,101,97,100,101,114,32,102,111,114,32,97,32,112,97, - 99,107,97,103,101,32,105,110,32,97,32,122,105,112,32,102, - 105,108,101,46,10,10,32,32,32,32,32,32,32,32,73,102, - 
32,39,102,117,108,108,110,97,109,101,39,32,105,115,32,97, - 32,112,97,99,107,97,103,101,32,119,105,116,104,105,110,32, - 116,104,101,32,122,105,112,32,102,105,108,101,44,32,114,101, - 116,117,114,110,32,116,104,101,10,32,32,32,32,32,32,32, - 32,39,82,101,115,111,117,114,99,101,82,101,97,100,101,114, - 39,32,111,98,106,101,99,116,32,102,111,114,32,116,104,101, - 32,112,97,99,107,97,103,101,46,32,32,79,116,104,101,114, - 119,105,115,101,32,114,101,116,117,114,110,32,78,111,110,101, - 46,10,32,32,32,32,32,32,32,32,78,114,0,0,0,0, - 41,1,218,14,82,101,115,111,117,114,99,101,82,101,97,100, - 101,114,84,41,7,114,65,0,0,0,114,3,0,0,0,218, - 24,95,90,105,112,73,109,112,111,114,116,82,101,115,111,117, - 114,99,101,82,101,97,100,101,114,218,11,95,114,101,103,105, - 115,116,101,114,101,100,90,13,105,109,112,111,114,116,108,105, - 98,46,97,98,99,114,79,0,0,0,90,8,114,101,103,105, - 115,116,101,114,41,3,114,32,0,0,0,114,38,0,0,0, - 114,79,0,0,0,114,9,0,0,0,114,9,0,0,0,114, - 10,0,0,0,218,19,103,101,116,95,114,101,115,111,117,114, - 99,101,95,114,101,97,100,101,114,16,1,0,0,115,20,0, - 0,0,0,6,2,1,10,1,10,1,12,1,8,1,6,1, - 12,1,10,1,6,1,122,31,122,105,112,105,109,112,111,114, - 116,101,114,46,103,101,116,95,114,101,115,111,117,114,99,101, - 95,114,101,97,100,101,114,99,1,0,0,0,0,0,0,0, - 0,0,0,0,1,0,0,0,5,0,0,0,67,0,0,0, - 115,24,0,0,0,100,1,124,0,106,0,155,0,116,1,155, - 0,124,0,106,2,155,0,100,2,157,5,83,0,41,3,78, - 122,21,60,122,105,112,105,109,112,111,114,116,101,114,32,111, - 98,106,101,99,116,32,34,122,2,34,62,41,3,114,29,0, - 0,0,114,20,0,0,0,114,31,0,0,0,41,1,114,32, + 100,44,132,0,90,42,100,45,100,46,132,0,90,43,100,2, + 83,0,41,47,97,80,2,0,0,122,105,112,105,109,112,111, + 114,116,32,112,114,111,118,105,100,101,115,32,115,117,112,112, + 111,114,116,32,102,111,114,32,105,109,112,111,114,116,105,110, + 103,32,80,121,116,104,111,110,32,109,111,100,117,108,101,115, + 32,102,114,111,109,32,90,105,112,32,97,114,99,104,105,118, + 101,115,46,10,10,84,104,105,115,32,109,111,100,117,108,101, + 32,101,120,112,111,114,116,115,32,116,104,114,101,101,32,111, + 98,106,101,99,116,115,58,10,45,32,122,105,112,105,109,112, + 111,114,116,101,114,58,32,97,32,99,108,97,115,115,59,32, + 105,116,115,32,99,111,110,115,116,114,117,99,116,111,114,32, + 116,97,107,101,115,32,97,32,112,97,116,104,32,116,111,32, + 97,32,90,105,112,32,97,114,99,104,105,118,101,46,10,45, + 32,90,105,112,73,109,112,111,114,116,69,114,114,111,114,58, + 32,101,120,99,101,112,116,105,111,110,32,114,97,105,115,101, + 100,32,98,121,32,122,105,112,105,109,112,111,114,116,101,114, + 32,111,98,106,101,99,116,115,46,32,73,116,39,115,32,97, + 10,32,32,115,117,98,99,108,97,115,115,32,111,102,32,73, + 109,112,111,114,116,69,114,114,111,114,44,32,115,111,32,105, + 116,32,99,97,110,32,98,101,32,99,97,117,103,104,116,32, + 97,115,32,73,109,112,111,114,116,69,114,114,111,114,44,32, + 116,111,111,46,10,45,32,95,122,105,112,95,100,105,114,101, + 99,116,111,114,121,95,99,97,99,104,101,58,32,97,32,100, + 105,99,116,44,32,109,97,112,112,105,110,103,32,97,114,99, + 104,105,118,101,32,112,97,116,104,115,32,116,111,32,122,105, + 112,32,100,105,114,101,99,116,111,114,121,10,32,32,105,110, + 102,111,32,100,105,99,116,115,44,32,97,115,32,117,115,101, + 100,32,105,110,32,122,105,112,105,109,112,111,114,116,101,114, + 46,95,102,105,108,101,115,46,10,10,73,116,32,105,115,32, + 117,115,117,97,108,108,121,32,110,111,116,32,110,101,101,100, + 101,100,32,116,111,32,117,115,101,32,116,104,101,32,122,105, + 112,105,109,112,111,114,116,32,109,111,100,117,108,101,32,101, + 
120,112,108,105,99,105,116,108,121,59,32,105,116,32,105,115, + 10,117,115,101,100,32,98,121,32,116,104,101,32,98,117,105, + 108,116,105,110,32,105,109,112,111,114,116,32,109,101,99,104, + 97,110,105,115,109,32,102,111,114,32,115,121,115,46,112,97, + 116,104,32,105,116,101,109,115,32,116,104,97,116,32,97,114, + 101,32,112,97,116,104,115,10,116,111,32,90,105,112,32,97, + 114,99,104,105,118,101,115,46,10,233,0,0,0,0,78,41, + 2,218,14,95,117,110,112,97,99,107,95,117,105,110,116,49, + 54,218,14,95,117,110,112,97,99,107,95,117,105,110,116,51, + 50,218,14,90,105,112,73,109,112,111,114,116,69,114,114,111, + 114,218,11,122,105,112,105,109,112,111,114,116,101,114,233,1, + 0,0,0,99,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,1,0,0,0,64,0,0,0,115,12,0,0, + 0,101,0,90,1,100,0,90,2,100,1,83,0,41,2,114, + 3,0,0,0,78,41,3,218,8,95,95,110,97,109,101,95, + 95,218,10,95,95,109,111,100,117,108,101,95,95,218,12,95, + 95,113,117,97,108,110,97,109,101,95,95,169,0,114,9,0, + 0,0,114,9,0,0,0,250,18,60,102,114,111,122,101,110, + 32,122,105,112,105,109,112,111,114,116,62,114,3,0,0,0, + 33,0,0,0,115,2,0,0,0,8,1,233,22,0,0,0, + 115,4,0,0,0,80,75,5,6,105,255,255,0,0,99,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,3, + 0,0,0,64,0,0,0,115,108,0,0,0,101,0,90,1, + 100,0,90,2,100,1,90,3,100,2,100,3,132,0,90,4, + 100,25,100,5,100,6,132,1,90,5,100,26,100,7,100,8, + 132,1,90,6,100,9,100,10,132,0,90,7,100,11,100,12, + 132,0,90,8,100,13,100,14,132,0,90,9,100,15,100,16, + 132,0,90,10,100,17,100,18,132,0,90,11,100,19,100,20, + 132,0,90,12,100,21,100,22,132,0,90,13,100,23,100,24, + 132,0,90,14,100,4,83,0,41,27,114,4,0,0,0,97, + 255,1,0,0,122,105,112,105,109,112,111,114,116,101,114,40, + 97,114,99,104,105,118,101,112,97,116,104,41,32,45,62,32, + 122,105,112,105,109,112,111,114,116,101,114,32,111,98,106,101, + 99,116,10,10,32,32,32,32,67,114,101,97,116,101,32,97, + 32,110,101,119,32,122,105,112,105,109,112,111,114,116,101,114, + 32,105,110,115,116,97,110,99,101,46,32,39,97,114,99,104, + 105,118,101,112,97,116,104,39,32,109,117,115,116,32,98,101, + 32,97,32,112,97,116,104,32,116,111,10,32,32,32,32,97, + 32,122,105,112,102,105,108,101,44,32,111,114,32,116,111,32, + 97,32,115,112,101,99,105,102,105,99,32,112,97,116,104,32, + 105,110,115,105,100,101,32,97,32,122,105,112,102,105,108,101, + 46,32,70,111,114,32,101,120,97,109,112,108,101,44,32,105, + 116,32,99,97,110,32,98,101,10,32,32,32,32,39,47,116, + 109,112,47,109,121,105,109,112,111,114,116,46,122,105,112,39, + 44,32,111,114,32,39,47,116,109,112,47,109,121,105,109,112, + 111,114,116,46,122,105,112,47,109,121,100,105,114,101,99,116, + 111,114,121,39,44,32,105,102,32,109,121,100,105,114,101,99, + 116,111,114,121,32,105,115,32,97,10,32,32,32,32,118,97, + 108,105,100,32,100,105,114,101,99,116,111,114,121,32,105,110, + 115,105,100,101,32,116,104,101,32,97,114,99,104,105,118,101, + 46,10,10,32,32,32,32,39,90,105,112,73,109,112,111,114, + 116,69,114,114,111,114,32,105,115,32,114,97,105,115,101,100, + 32,105,102,32,39,97,114,99,104,105,118,101,112,97,116,104, + 39,32,100,111,101,115,110,39,116,32,112,111,105,110,116,32, + 116,111,32,97,32,118,97,108,105,100,32,90,105,112,10,32, + 32,32,32,97,114,99,104,105,118,101,46,10,10,32,32,32, + 32,84,104,101,32,39,97,114,99,104,105,118,101,39,32,97, + 116,116,114,105,98,117,116,101,32,111,102,32,122,105,112,105, + 109,112,111,114,116,101,114,32,111,98,106,101,99,116,115,32, + 99,111,110,116,97,105,110,115,32,116,104,101,32,110,97,109, + 101,32,111,102,32,116,104,101,10,32,32,32,32,122,105,112, + 102,105,108,101,32,116,97,114,103,101,116,101,100,46,10,32, + 
32,32,32,99,2,0,0,0,0,0,0,0,0,0,0,0, + 8,0,0,0,9,0,0,0,67,0,0,0,115,32,1,0, + 0,116,0,124,1,116,1,131,2,115,28,100,1,100,0,108, + 2,125,2,124,2,160,3,124,1,161,1,125,1,124,1,115, + 44,116,4,100,2,124,1,100,3,141,2,130,1,116,5,114, + 60,124,1,160,6,116,5,116,7,161,2,125,1,103,0,125, + 3,122,14,116,8,160,9,124,1,161,1,125,4,87,0,110, + 70,4,0,116,10,116,11,102,2,121,148,1,0,1,0,1, + 0,116,8,160,12,124,1,161,1,92,2,125,5,125,6,124, + 5,124,1,107,2,114,130,116,4,100,4,124,1,100,3,141, + 2,130,1,124,5,125,1,124,3,160,13,124,6,161,1,1, + 0,89,0,113,64,48,0,124,4,106,14,100,5,64,0,100, + 6,107,3,114,180,116,4,100,4,124,1,100,3,141,2,130, + 1,113,180,113,64,122,12,116,15,124,1,25,0,125,7,87, + 0,110,34,4,0,116,16,121,226,1,0,1,0,1,0,116, + 17,124,1,131,1,125,7,124,7,116,15,124,1,60,0,89, + 0,110,2,48,0,124,7,124,0,95,18,124,1,124,0,95, + 19,116,8,106,20,124,3,100,0,100,0,100,7,133,3,25, + 0,142,0,124,0,95,21,124,0,106,21,144,1,114,28,124, + 0,4,0,106,21,116,7,55,0,2,0,95,21,100,0,83, + 0,41,8,78,114,0,0,0,0,122,21,97,114,99,104,105, + 118,101,32,112,97,116,104,32,105,115,32,101,109,112,116,121, + 169,1,218,4,112,97,116,104,122,14,110,111,116,32,97,32, + 90,105,112,32,102,105,108,101,105,0,240,0,0,105,0,128, + 0,0,233,255,255,255,255,41,22,218,10,105,115,105,110,115, + 116,97,110,99,101,218,3,115,116,114,218,2,111,115,90,8, + 102,115,100,101,99,111,100,101,114,3,0,0,0,218,12,97, + 108,116,95,112,97,116,104,95,115,101,112,218,7,114,101,112, + 108,97,99,101,218,8,112,97,116,104,95,115,101,112,218,19, + 95,98,111,111,116,115,116,114,97,112,95,101,120,116,101,114, + 110,97,108,90,10,95,112,97,116,104,95,115,116,97,116,218, + 7,79,83,69,114,114,111,114,218,10,86,97,108,117,101,69, + 114,114,111,114,90,11,95,112,97,116,104,95,115,112,108,105, + 116,218,6,97,112,112,101,110,100,90,7,115,116,95,109,111, + 100,101,218,20,95,122,105,112,95,100,105,114,101,99,116,111, + 114,121,95,99,97,99,104,101,218,8,75,101,121,69,114,114, + 111,114,218,15,95,114,101,97,100,95,100,105,114,101,99,116, + 111,114,121,218,6,95,102,105,108,101,115,218,7,97,114,99, + 104,105,118,101,218,10,95,112,97,116,104,95,106,111,105,110, + 218,6,112,114,101,102,105,120,41,8,218,4,115,101,108,102, + 114,13,0,0,0,114,17,0,0,0,114,31,0,0,0,90, + 2,115,116,90,7,100,105,114,110,97,109,101,90,8,98,97, + 115,101,110,97,109,101,218,5,102,105,108,101,115,114,9,0, + 0,0,114,9,0,0,0,114,10,0,0,0,218,8,95,95, + 105,110,105,116,95,95,63,0,0,0,115,58,0,0,0,0, + 1,10,1,8,1,10,1,4,1,12,1,4,1,12,2,4, + 2,2,1,14,1,16,3,14,1,8,1,12,1,4,1,16, + 3,14,2,12,1,4,2,2,1,12,1,12,1,8,1,14, + 1,6,1,6,2,22,1,8,1,122,20,122,105,112,105,109, + 112,111,114,116,101,114,46,95,95,105,110,105,116,95,95,78, + 99,3,0,0,0,0,0,0,0,0,0,0,0,5,0,0, + 0,4,0,0,0,67,0,0,0,115,78,0,0,0,116,0, + 124,0,124,1,131,2,125,3,124,3,100,1,117,1,114,26, + 124,0,103,0,102,2,83,0,116,1,124,0,124,1,131,2, + 125,4,116,2,124,0,124,4,131,2,114,70,100,1,124,0, + 106,3,155,0,116,4,155,0,124,4,155,0,157,3,103,1, + 102,2,83,0,100,1,103,0,102,2,83,0,41,2,97,239, + 1,0,0,102,105,110,100,95,108,111,97,100,101,114,40,102, + 117,108,108,110,97,109,101,44,32,112,97,116,104,61,78,111, + 110,101,41,32,45,62,32,115,101,108,102,44,32,115,116,114, + 32,111,114,32,78,111,110,101,46,10,10,32,32,32,32,32, + 32,32,32,83,101,97,114,99,104,32,102,111,114,32,97,32, + 109,111,100,117,108,101,32,115,112,101,99,105,102,105,101,100, + 32,98,121,32,39,102,117,108,108,110,97,109,101,39,46,32, + 39,102,117,108,108,110,97,109,101,39,32,109,117,115,116,32, + 98,101,32,116,104,101,10,32,32,32,32,32,32,32,32,102, + 
117,108,108,121,32,113,117,97,108,105,102,105,101,100,32,40, + 100,111,116,116,101,100,41,32,109,111,100,117,108,101,32,110, + 97,109,101,46,32,73,116,32,114,101,116,117,114,110,115,32, + 116,104,101,32,122,105,112,105,109,112,111,114,116,101,114,10, + 32,32,32,32,32,32,32,32,105,110,115,116,97,110,99,101, + 32,105,116,115,101,108,102,32,105,102,32,116,104,101,32,109, + 111,100,117,108,101,32,119,97,115,32,102,111,117,110,100,44, + 32,97,32,115,116,114,105,110,103,32,99,111,110,116,97,105, + 110,105,110,103,32,116,104,101,10,32,32,32,32,32,32,32, + 32,102,117,108,108,32,112,97,116,104,32,110,97,109,101,32, + 105,102,32,105,116,39,115,32,112,111,115,115,105,98,108,121, + 32,97,32,112,111,114,116,105,111,110,32,111,102,32,97,32, + 110,97,109,101,115,112,97,99,101,32,112,97,99,107,97,103, + 101,44,10,32,32,32,32,32,32,32,32,111,114,32,78,111, + 110,101,32,111,116,104,101,114,119,105,115,101,46,32,84,104, + 101,32,111,112,116,105,111,110,97,108,32,39,112,97,116,104, + 39,32,97,114,103,117,109,101,110,116,32,105,115,32,105,103, + 110,111,114,101,100,32,45,45,32,105,116,39,115,10,32,32, + 32,32,32,32,32,32,116,104,101,114,101,32,102,111,114,32, + 99,111,109,112,97,116,105,98,105,108,105,116,121,32,119,105, + 116,104,32,116,104,101,32,105,109,112,111,114,116,101,114,32, + 112,114,111,116,111,99,111,108,46,10,32,32,32,32,32,32, + 32,32,78,41,5,218,16,95,103,101,116,95,109,111,100,117, + 108,101,95,105,110,102,111,218,16,95,103,101,116,95,109,111, + 100,117,108,101,95,112,97,116,104,218,7,95,105,115,95,100, + 105,114,114,29,0,0,0,114,20,0,0,0,41,5,114,32, + 0,0,0,218,8,102,117,108,108,110,97,109,101,114,13,0, + 0,0,218,2,109,105,218,7,109,111,100,112,97,116,104,114, + 9,0,0,0,114,9,0,0,0,114,10,0,0,0,218,11, + 102,105,110,100,95,108,111,97,100,101,114,109,0,0,0,115, + 14,0,0,0,0,10,10,1,8,2,8,7,10,1,10,4, + 24,2,122,23,122,105,112,105,109,112,111,114,116,101,114,46, + 102,105,110,100,95,108,111,97,100,101,114,99,3,0,0,0, + 0,0,0,0,0,0,0,0,3,0,0,0,4,0,0,0, + 67,0,0,0,115,16,0,0,0,124,0,160,0,124,1,124, + 2,161,2,100,1,25,0,83,0,41,2,97,139,1,0,0, + 102,105,110,100,95,109,111,100,117,108,101,40,102,117,108,108, + 110,97,109,101,44,32,112,97,116,104,61,78,111,110,101,41, + 32,45,62,32,115,101,108,102,32,111,114,32,78,111,110,101, + 46,10,10,32,32,32,32,32,32,32,32,83,101,97,114,99, + 104,32,102,111,114,32,97,32,109,111,100,117,108,101,32,115, + 112,101,99,105,102,105,101,100,32,98,121,32,39,102,117,108, + 108,110,97,109,101,39,46,32,39,102,117,108,108,110,97,109, + 101,39,32,109,117,115,116,32,98,101,32,116,104,101,10,32, + 32,32,32,32,32,32,32,102,117,108,108,121,32,113,117,97, + 108,105,102,105,101,100,32,40,100,111,116,116,101,100,41,32, + 109,111,100,117,108,101,32,110,97,109,101,46,32,73,116,32, + 114,101,116,117,114,110,115,32,116,104,101,32,122,105,112,105, + 109,112,111,114,116,101,114,10,32,32,32,32,32,32,32,32, + 105,110,115,116,97,110,99,101,32,105,116,115,101,108,102,32, + 105,102,32,116,104,101,32,109,111,100,117,108,101,32,119,97, + 115,32,102,111,117,110,100,44,32,111,114,32,78,111,110,101, + 32,105,102,32,105,116,32,119,97,115,110,39,116,46,10,32, + 32,32,32,32,32,32,32,84,104,101,32,111,112,116,105,111, + 110,97,108,32,39,112,97,116,104,39,32,97,114,103,117,109, + 101,110,116,32,105,115,32,105,103,110,111,114,101,100,32,45, + 45,32,105,116,39,115,32,116,104,101,114,101,32,102,111,114, + 32,99,111,109,112,97,116,105,98,105,108,105,116,121,10,32, + 32,32,32,32,32,32,32,119,105,116,104,32,116,104,101,32, + 105,109,112,111,114,116,101,114,32,112,114,111,116,111,99,111, + 
108,46,10,32,32,32,32,32,32,32,32,114,0,0,0,0, + 41,1,114,41,0,0,0,41,3,114,32,0,0,0,114,38, + 0,0,0,114,13,0,0,0,114,9,0,0,0,114,9,0, + 0,0,114,10,0,0,0,218,11,102,105,110,100,95,109,111, + 100,117,108,101,141,0,0,0,115,2,0,0,0,0,9,122, + 23,122,105,112,105,109,112,111,114,116,101,114,46,102,105,110, + 100,95,109,111,100,117,108,101,99,2,0,0,0,0,0,0, + 0,0,0,0,0,5,0,0,0,3,0,0,0,67,0,0, + 0,115,20,0,0,0,116,0,124,0,124,1,131,2,92,3, + 125,2,125,3,125,4,124,2,83,0,41,1,122,163,103,101, + 116,95,99,111,100,101,40,102,117,108,108,110,97,109,101,41, + 32,45,62,32,99,111,100,101,32,111,98,106,101,99,116,46, + 10,10,32,32,32,32,32,32,32,32,82,101,116,117,114,110, + 32,116,104,101,32,99,111,100,101,32,111,98,106,101,99,116, + 32,102,111,114,32,116,104,101,32,115,112,101,99,105,102,105, + 101,100,32,109,111,100,117,108,101,46,32,82,97,105,115,101, + 32,90,105,112,73,109,112,111,114,116,69,114,114,111,114,10, + 32,32,32,32,32,32,32,32,105,102,32,116,104,101,32,109, + 111,100,117,108,101,32,99,111,117,108,100,110,39,116,32,98, + 101,32,102,111,117,110,100,46,10,32,32,32,32,32,32,32, + 32,169,1,218,16,95,103,101,116,95,109,111,100,117,108,101, + 95,99,111,100,101,169,5,114,32,0,0,0,114,38,0,0, + 0,218,4,99,111,100,101,218,9,105,115,112,97,99,107,97, + 103,101,114,40,0,0,0,114,9,0,0,0,114,9,0,0, + 0,114,10,0,0,0,218,8,103,101,116,95,99,111,100,101, + 153,0,0,0,115,4,0,0,0,0,6,16,1,122,20,122, + 105,112,105,109,112,111,114,116,101,114,46,103,101,116,95,99, + 111,100,101,99,2,0,0,0,0,0,0,0,0,0,0,0, + 4,0,0,0,8,0,0,0,67,0,0,0,115,116,0,0, + 0,116,0,114,16,124,1,160,1,116,0,116,2,161,2,125, + 1,124,1,125,2,124,1,160,3,124,0,106,4,116,2,23, + 0,161,1,114,58,124,1,116,5,124,0,106,4,116,2,23, + 0,131,1,100,1,133,2,25,0,125,2,122,14,124,0,106, + 6,124,2,25,0,125,3,87,0,110,30,4,0,116,7,121, + 102,1,0,1,0,1,0,116,8,100,2,100,3,124,2,131, + 3,130,1,89,0,110,2,48,0,116,9,124,0,106,4,124, + 3,131,2,83,0,41,4,122,154,103,101,116,95,100,97,116, + 97,40,112,97,116,104,110,97,109,101,41,32,45,62,32,115, + 116,114,105,110,103,32,119,105,116,104,32,102,105,108,101,32, + 100,97,116,97,46,10,10,32,32,32,32,32,32,32,32,82, + 101,116,117,114,110,32,116,104,101,32,100,97,116,97,32,97, + 115,115,111,99,105,97,116,101,100,32,119,105,116,104,32,39, + 112,97,116,104,110,97,109,101,39,46,32,82,97,105,115,101, + 32,79,83,69,114,114,111,114,32,105,102,10,32,32,32,32, + 32,32,32,32,116,104,101,32,102,105,108,101,32,119,97,115, + 110,39,116,32,102,111,117,110,100,46,10,32,32,32,32,32, + 32,32,32,78,114,0,0,0,0,218,0,41,10,114,18,0, + 0,0,114,19,0,0,0,114,20,0,0,0,218,10,115,116, + 97,114,116,115,119,105,116,104,114,29,0,0,0,218,3,108, + 101,110,114,28,0,0,0,114,26,0,0,0,114,22,0,0, + 0,218,9,95,103,101,116,95,100,97,116,97,41,4,114,32, + 0,0,0,218,8,112,97,116,104,110,97,109,101,90,3,107, + 101,121,218,9,116,111,99,95,101,110,116,114,121,114,9,0, + 0,0,114,9,0,0,0,114,10,0,0,0,218,8,103,101, + 116,95,100,97,116,97,163,0,0,0,115,20,0,0,0,0, + 6,4,1,12,2,4,1,16,1,22,2,2,1,14,1,12, + 1,18,1,122,20,122,105,112,105,109,112,111,114,116,101,114, + 46,103,101,116,95,100,97,116,97,99,2,0,0,0,0,0, + 0,0,0,0,0,0,5,0,0,0,3,0,0,0,67,0, + 0,0,115,20,0,0,0,116,0,124,0,124,1,131,2,92, + 3,125,2,125,3,125,4,124,4,83,0,41,1,122,106,103, + 101,116,95,102,105,108,101,110,97,109,101,40,102,117,108,108, + 110,97,109,101,41,32,45,62,32,102,105,108,101,110,97,109, + 101,32,115,116,114,105,110,103,46,10,10,32,32,32,32,32, + 32,32,32,82,101,116,117,114,110,32,116,104,101,32,102,105, + 108,101,110,97,109,101,32,102,111,114,32,116,104,101,32,115, + 
112,101,99,105,102,105,101,100,32,109,111,100,117,108,101,46, + 10,32,32,32,32,32,32,32,32,114,43,0,0,0,114,45, 0,0,0,114,9,0,0,0,114,9,0,0,0,114,10,0, - 0,0,218,8,95,95,114,101,112,114,95,95,34,1,0,0, - 115,2,0,0,0,0,1,122,20,122,105,112,105,109,112,111, - 114,116,101,114,46,95,95,114,101,112,114,95,95,41,1,78, - 41,1,78,41,15,114,6,0,0,0,114,7,0,0,0,114, - 8,0,0,0,218,7,95,95,100,111,99,95,95,114,34,0, - 0,0,114,41,0,0,0,114,42,0,0,0,114,48,0,0, - 0,114,55,0,0,0,114,56,0,0,0,114,64,0,0,0, - 114,65,0,0,0,114,78,0,0,0,114,82,0,0,0,114, - 83,0,0,0,114,9,0,0,0,114,9,0,0,0,114,9, - 0,0,0,114,10,0,0,0,114,4,0,0,0,45,0,0, - 0,115,24,0,0,0,8,1,4,17,8,46,10,32,10,12, - 8,10,8,21,8,11,8,26,8,13,8,38,8,18,122,12, - 95,95,105,110,105,116,95,95,46,112,121,99,84,114,60,0, - 0,0,70,41,3,122,4,46,112,121,99,84,70,41,3,114, - 61,0,0,0,70,70,99,2,0,0,0,0,0,0,0,0, - 0,0,0,2,0,0,0,4,0,0,0,67,0,0,0,115, - 20,0,0,0,124,0,106,0,124,1,160,1,100,1,161,1, - 100,2,25,0,23,0,83,0,41,3,78,218,1,46,233,2, - 0,0,0,41,2,114,31,0,0,0,218,10,114,112,97,114, - 116,105,116,105,111,110,41,2,114,32,0,0,0,114,38,0, - 0,0,114,9,0,0,0,114,9,0,0,0,114,10,0,0, - 0,114,36,0,0,0,52,1,0,0,115,2,0,0,0,0, - 1,114,36,0,0,0,99,2,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,2,0,0,0,67,0,0,0,115, - 18,0,0,0,124,1,116,0,23,0,125,2,124,2,124,0, - 106,1,118,0,83,0,169,1,78,41,2,114,20,0,0,0, - 114,28,0,0,0,41,3,114,32,0,0,0,114,13,0,0, - 0,90,7,100,105,114,112,97,116,104,114,9,0,0,0,114, - 9,0,0,0,114,10,0,0,0,114,37,0,0,0,56,1, - 0,0,115,4,0,0,0,0,4,8,2,114,37,0,0,0, - 99,2,0,0,0,0,0,0,0,0,0,0,0,7,0,0, - 0,4,0,0,0,67,0,0,0,115,56,0,0,0,116,0, - 124,0,124,1,131,2,125,2,116,1,68,0,93,36,92,3, - 125,3,125,4,125,5,124,2,124,3,23,0,125,6,124,6, - 124,0,106,2,118,0,114,14,124,5,2,0,1,0,83,0, - 113,14,100,0,83,0,114,88,0,0,0,41,3,114,36,0, - 0,0,218,16,95,122,105,112,95,115,101,97,114,99,104,111, - 114,100,101,114,114,28,0,0,0,41,7,114,32,0,0,0, - 114,38,0,0,0,114,13,0,0,0,218,6,115,117,102,102, - 105,120,218,10,105,115,98,121,116,101,99,111,100,101,114,47, - 0,0,0,114,63,0,0,0,114,9,0,0,0,114,9,0, - 0,0,114,10,0,0,0,114,35,0,0,0,65,1,0,0, - 115,12,0,0,0,0,1,10,1,14,1,8,1,10,1,10, - 1,114,35,0,0,0,99,1,0,0,0,0,0,0,0,0, - 0,0,0,26,0,0,0,9,0,0,0,67,0,0,0,115, - 2,5,0,0,122,14,116,0,160,1,124,0,161,1,125,1, - 87,0,110,36,4,0,116,2,121,50,1,0,1,0,1,0, - 116,3,100,1,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,89,0,110,2,48,0,124,1,144,4,143,164,1,0, - 122,36,124,1,160,4,116,5,11,0,100,3,161,2,1,0, - 124,1,160,6,161,0,125,2,124,1,160,7,116,5,161,1, - 125,3,87,0,110,36,4,0,116,2,121,132,1,0,1,0, - 1,0,116,3,100,4,124,0,155,2,157,2,124,0,100,2, - 141,2,130,1,89,0,110,2,48,0,116,8,124,3,131,1, - 116,5,107,3,114,164,116,3,100,4,124,0,155,2,157,2, - 124,0,100,2,141,2,130,1,124,3,100,0,100,5,133,2, - 25,0,116,9,107,3,144,1,114,170,122,24,124,1,160,4, - 100,6,100,3,161,2,1,0,124,1,160,6,161,0,125,4, - 87,0,110,36,4,0,116,2,121,242,1,0,1,0,1,0, - 116,3,100,4,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,89,0,110,2,48,0,116,10,124,4,116,11,24,0, - 116,5,24,0,100,6,131,2,125,5,122,22,124,1,160,4, - 124,5,161,1,1,0,124,1,160,7,161,0,125,6,87,0, - 110,38,4,0,116,2,144,1,121,66,1,0,1,0,1,0, - 116,3,100,4,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,89,0,110,2,48,0,124,6,160,12,116,9,161,1, - 125,7,124,7,100,6,107,0,144,1,114,106,116,3,100,7, - 124,0,155,2,157,2,124,0,100,2,141,2,130,1,124,6, - 124,7,124,7,116,5,23,0,133,2,25,0,125,3,116,8, - 124,3,131,1,116,5,107,3,144,1,114,154,116,3,100,8, - 124,0,155,2,157,2,124,0,100,2,141,2,130,1,124,4, - 
116,8,124,6,131,1,24,0,124,7,23,0,125,2,116,13, - 124,3,100,9,100,10,133,2,25,0,131,1,125,8,116,13, - 124,3,100,10,100,11,133,2,25,0,131,1,125,9,124,2, - 124,8,107,0,144,1,114,230,116,3,100,12,124,0,155,2, - 157,2,124,0,100,2,141,2,130,1,124,2,124,9,107,0, - 144,2,114,2,116,3,100,13,124,0,155,2,157,2,124,0, - 100,2,141,2,130,1,124,2,124,8,56,0,125,2,124,2, - 124,9,24,0,125,10,124,10,100,6,107,0,144,2,114,46, - 116,3,100,14,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,105,0,125,11,100,6,125,12,122,14,124,1,160,4, - 124,2,161,1,1,0,87,0,110,38,4,0,116,2,144,2, - 121,106,1,0,1,0,1,0,116,3,100,4,124,0,155,2, - 157,2,124,0,100,2,141,2,130,1,89,0,110,2,48,0, - 124,1,160,7,100,15,161,1,125,3,116,8,124,3,131,1, - 100,5,107,0,144,2,114,140,116,14,100,16,131,1,130,1, - 124,3,100,0,100,5,133,2,25,0,100,17,107,3,144,2, - 114,162,144,4,113,208,116,8,124,3,131,1,100,15,107,3, - 144,2,114,184,116,14,100,16,131,1,130,1,116,15,124,3, - 100,18,100,19,133,2,25,0,131,1,125,13,116,15,124,3, - 100,19,100,9,133,2,25,0,131,1,125,14,116,15,124,3, - 100,9,100,20,133,2,25,0,131,1,125,15,116,15,124,3, - 100,20,100,10,133,2,25,0,131,1,125,16,116,13,124,3, - 100,10,100,11,133,2,25,0,131,1,125,17,116,13,124,3, - 100,11,100,21,133,2,25,0,131,1,125,18,116,13,124,3, - 100,21,100,22,133,2,25,0,131,1,125,4,116,15,124,3, - 100,22,100,23,133,2,25,0,131,1,125,19,116,15,124,3, - 100,23,100,24,133,2,25,0,131,1,125,20,116,15,124,3, - 100,24,100,25,133,2,25,0,131,1,125,21,116,13,124,3, - 100,26,100,15,133,2,25,0,131,1,125,22,124,19,124,20, - 23,0,124,21,23,0,125,8,124,22,124,9,107,4,144,3, - 114,144,116,3,100,27,124,0,155,2,157,2,124,0,100,2, - 141,2,130,1,124,22,124,10,55,0,125,22,122,14,124,1, - 160,7,124,19,161,1,125,23,87,0,110,38,4,0,116,2, - 144,3,121,204,1,0,1,0,1,0,116,3,100,4,124,0, - 155,2,157,2,124,0,100,2,141,2,130,1,89,0,110,2, - 48,0,116,8,124,23,131,1,124,19,107,3,144,3,114,238, - 116,3,100,4,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,122,50,116,8,124,1,160,7,124,8,124,19,24,0, - 161,1,131,1,124,8,124,19,24,0,107,3,144,4,114,30, - 116,3,100,4,124,0,155,2,157,2,124,0,100,2,141,2, - 130,1,87,0,110,38,4,0,116,2,144,4,121,70,1,0, - 1,0,1,0,116,3,100,4,124,0,155,2,157,2,124,0, - 100,2,141,2,130,1,89,0,110,2,48,0,124,13,100,28, - 64,0,144,4,114,92,124,23,160,16,161,0,125,23,110,52, - 122,14,124,23,160,16,100,29,161,1,125,23,87,0,110,36, - 4,0,116,17,144,4,121,142,1,0,1,0,1,0,124,23, - 160,16,100,30,161,1,160,18,116,19,161,1,125,23,89,0, - 110,2,48,0,124,23,160,20,100,31,116,21,161,2,125,23, - 116,22,160,23,124,0,124,23,161,2,125,24,124,24,124,14, - 124,18,124,4,124,22,124,15,124,16,124,17,102,8,125,25, - 124,25,124,11,124,23,60,0,124,12,100,32,55,0,125,12, - 144,2,113,108,87,0,100,0,4,0,4,0,131,3,1,0, - 110,18,49,0,144,4,115,230,48,0,1,0,1,0,1,0, - 89,0,1,0,116,24,160,25,100,33,124,12,124,0,161,3, - 1,0,124,11,83,0,41,34,78,122,21,99,97,110,39,116, - 32,111,112,101,110,32,90,105,112,32,102,105,108,101,58,32, - 114,12,0,0,0,114,86,0,0,0,250,21,99,97,110,39, - 116,32,114,101,97,100,32,90,105,112,32,102,105,108,101,58, - 32,233,4,0,0,0,114,0,0,0,0,122,16,110,111,116, - 32,97,32,90,105,112,32,102,105,108,101,58,32,122,18,99, - 111,114,114,117,112,116,32,90,105,112,32,102,105,108,101,58, - 32,233,12,0,0,0,233,16,0,0,0,233,20,0,0,0, - 122,28,98,97,100,32,99,101,110,116,114,97,108,32,100,105, - 114,101,99,116,111,114,121,32,115,105,122,101,58,32,122,30, - 98,97,100,32,99,101,110,116,114,97,108,32,100,105,114,101, - 99,116,111,114,121,32,111,102,102,115,101,116,58,32,122,38, - 
98,97,100,32,99,101,110,116,114,97,108,32,100,105,114,101, - 99,116,111,114,121,32,115,105,122,101,32,111,114,32,111,102, - 102,115,101,116,58,32,233,46,0,0,0,250,27,69,79,70, - 32,114,101,97,100,32,119,104,101,114,101,32,110,111,116,32, - 101,120,112,101,99,116,101,100,115,4,0,0,0,80,75,1, - 2,233,8,0,0,0,233,10,0,0,0,233,14,0,0,0, - 233,24,0,0,0,233,28,0,0,0,233,30,0,0,0,233, - 32,0,0,0,233,34,0,0,0,233,42,0,0,0,122,25, - 98,97,100,32,108,111,99,97,108,32,104,101,97,100,101,114, - 32,111,102,102,115,101,116,58,32,105,0,8,0,0,218,5, - 97,115,99,105,105,90,6,108,97,116,105,110,49,250,1,47, - 114,5,0,0,0,122,33,122,105,112,105,109,112,111,114,116, - 58,32,102,111,117,110,100,32,123,125,32,110,97,109,101,115, - 32,105,110,32,123,33,114,125,41,26,218,3,95,105,111,218, - 9,111,112,101,110,95,99,111,100,101,114,22,0,0,0,114, - 3,0,0,0,218,4,115,101,101,107,218,20,69,78,68,95, - 67,69,78,84,82,65,76,95,68,73,82,95,83,73,90,69, - 90,4,116,101,108,108,218,4,114,101,97,100,114,51,0,0, - 0,218,18,83,84,82,73,78,71,95,69,78,68,95,65,82, - 67,72,73,86,69,218,3,109,97,120,218,15,77,65,88,95, - 67,79,77,77,69,78,84,95,76,69,78,218,5,114,102,105, - 110,100,114,2,0,0,0,218,8,69,79,70,69,114,114,111, - 114,114,1,0,0,0,114,62,0,0,0,218,18,85,110,105, - 99,111,100,101,68,101,99,111,100,101,69,114,114,111,114,218, - 9,116,114,97,110,115,108,97,116,101,218,11,99,112,52,51, - 55,95,116,97,98,108,101,114,19,0,0,0,114,20,0,0, - 0,114,21,0,0,0,114,30,0,0,0,114,76,0,0,0, - 114,77,0,0,0,41,26,114,29,0,0,0,218,2,102,112, - 90,15,104,101,97,100,101,114,95,112,111,115,105,116,105,111, - 110,218,6,98,117,102,102,101,114,218,9,102,105,108,101,95, - 115,105,122,101,90,17,109,97,120,95,99,111,109,109,101,110, - 116,95,115,116,97,114,116,218,4,100,97,116,97,90,3,112, - 111,115,218,11,104,101,97,100,101,114,95,115,105,122,101,90, - 13,104,101,97,100,101,114,95,111,102,102,115,101,116,90,10, - 97,114,99,95,111,102,102,115,101,116,114,33,0,0,0,218, - 5,99,111,117,110,116,218,5,102,108,97,103,115,218,8,99, - 111,109,112,114,101,115,115,218,4,116,105,109,101,218,4,100, - 97,116,101,218,3,99,114,99,218,9,100,97,116,97,95,115, - 105,122,101,218,9,110,97,109,101,95,115,105,122,101,218,10, - 101,120,116,114,97,95,115,105,122,101,90,12,99,111,109,109, - 101,110,116,95,115,105,122,101,218,11,102,105,108,101,95,111, - 102,102,115,101,116,114,59,0,0,0,114,13,0,0,0,218, - 1,116,114,9,0,0,0,114,9,0,0,0,114,10,0,0, - 0,114,27,0,0,0,96,1,0,0,115,212,0,0,0,0, - 1,2,1,14,1,12,1,24,2,8,1,2,1,14,1,8, - 1,14,1,12,1,24,1,12,1,18,1,18,3,2,1,12, - 1,12,1,12,1,10,1,2,255,12,2,8,1,2,255,2, - 1,2,255,4,2,2,1,10,1,12,1,14,1,10,1,2, - 255,12,2,10,1,10,1,10,1,2,255,6,2,16,1,14, - 1,10,1,2,255,6,2,16,2,16,1,16,1,10,1,18, - 1,10,1,18,1,8,1,8,1,10,1,18,2,4,2,4, - 1,2,1,14,1,14,1,24,2,10,1,14,1,8,2,18, - 1,4,1,14,1,8,1,16,1,16,1,16,1,16,1,16, - 1,16,1,16,1,16,1,16,1,16,1,16,1,12,1,10, - 1,18,1,8,2,2,1,14,1,14,1,24,1,14,1,18, - 4,2,1,28,1,22,1,14,1,24,2,10,2,10,3,2, - 1,14,1,14,1,22,2,12,1,12,1,20,1,8,1,44, - 1,14,1,114,27,0,0,0,117,190,1,0,0,0,1,2, - 3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18, - 19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34, - 35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50, - 51,52,53,54,55,56,57,58,59,60,61,62,63,64,65,66, - 67,68,69,70,71,72,73,74,75,76,77,78,79,80,81,82, - 83,84,85,86,87,88,89,90,91,92,93,94,95,96,97,98, - 99,100,101,102,103,104,105,106,107,108,109,110,111,112,113,114, - 115,116,117,118,119,120,121,122,123,124,125,126,127,195,135,195, - 188,195,169,195,162,195,164,195,160,195,165,195,167,195,170,195, - 
171,195,168,195,175,195,174,195,172,195,132,195,133,195,137,195, - 166,195,134,195,180,195,182,195,178,195,187,195,185,195,191,195, - 150,195,156,194,162,194,163,194,165,226,130,167,198,146,195,161, - 195,173,195,179,195,186,195,177,195,145,194,170,194,186,194,191, - 226,140,144,194,172,194,189,194,188,194,161,194,171,194,187,226, - 150,145,226,150,146,226,150,147,226,148,130,226,148,164,226,149, - 161,226,149,162,226,149,150,226,149,149,226,149,163,226,149,145, - 226,149,151,226,149,157,226,149,156,226,149,155,226,148,144,226, - 148,148,226,148,180,226,148,172,226,148,156,226,148,128,226,148, - 188,226,149,158,226,149,159,226,149,154,226,149,148,226,149,169, - 226,149,166,226,149,160,226,149,144,226,149,172,226,149,167,226, - 149,168,226,149,164,226,149,165,226,149,153,226,149,152,226,149, - 146,226,149,147,226,149,171,226,149,170,226,148,152,226,148,140, - 226,150,136,226,150,132,226,150,140,226,150,144,226,150,128,206, - 177,195,159,206,147,207,128,206,163,207,131,194,181,207,132,206, - 166,206,152,206,169,206,180,226,136,158,207,134,206,181,226,136, - 169,226,137,161,194,177,226,137,165,226,137,164,226,140,160,226, - 140,161,195,183,226,137,136,194,176,226,136,153,194,183,226,136, - 154,226,129,191,194,178,226,150,160,194,160,99,0,0,0,0, - 0,0,0,0,0,0,0,0,1,0,0,0,8,0,0,0, - 67,0,0,0,115,110,0,0,0,116,0,114,22,116,1,160, - 2,100,1,161,1,1,0,116,3,100,2,131,1,130,1,100, - 3,97,0,122,62,122,16,100,4,100,5,108,4,109,5,125, - 0,1,0,87,0,110,36,4,0,116,6,121,80,1,0,1, - 0,1,0,116,1,160,2,100,1,161,1,1,0,116,3,100, - 2,131,1,130,1,89,0,110,2,48,0,87,0,100,6,97, - 0,110,6,100,6,97,0,48,0,116,1,160,2,100,7,161, - 1,1,0,124,0,83,0,41,8,78,122,27,122,105,112,105, - 109,112,111,114,116,58,32,122,108,105,98,32,85,78,65,86, - 65,73,76,65,66,76,69,250,41,99,97,110,39,116,32,100, - 101,99,111,109,112,114,101,115,115,32,100,97,116,97,59,32, - 122,108,105,98,32,110,111,116,32,97,118,97,105,108,97,98, - 108,101,84,114,0,0,0,0,169,1,218,10,100,101,99,111, - 109,112,114,101,115,115,70,122,25,122,105,112,105,109,112,111, - 114,116,58,32,122,108,105,98,32,97,118,97,105,108,97,98, - 108,101,41,7,218,15,95,105,109,112,111,114,116,105,110,103, - 95,122,108,105,98,114,76,0,0,0,114,77,0,0,0,114, - 3,0,0,0,90,4,122,108,105,98,114,141,0,0,0,218, - 9,69,120,99,101,112,116,105,111,110,114,140,0,0,0,114, - 9,0,0,0,114,9,0,0,0,114,10,0,0,0,218,20, - 95,103,101,116,95,100,101,99,111,109,112,114,101,115,115,95, - 102,117,110,99,254,1,0,0,115,24,0,0,0,0,2,4, - 3,10,1,8,2,4,1,4,1,16,1,12,1,10,1,16, - 2,12,2,10,1,114,144,0,0,0,99,2,0,0,0,0, - 0,0,0,0,0,0,0,17,0,0,0,9,0,0,0,67, - 0,0,0,115,144,1,0,0,124,1,92,8,125,2,125,3, - 125,4,125,5,125,6,125,7,125,8,125,9,124,4,100,1, - 107,0,114,36,116,0,100,2,131,1,130,1,116,1,160,2, - 124,0,161,1,144,1,143,14,125,10,122,14,124,10,160,3, - 124,6,161,1,1,0,87,0,110,36,4,0,116,4,121,100, - 1,0,1,0,1,0,116,0,100,3,124,0,155,2,157,2, - 124,0,100,4,141,2,130,1,89,0,110,2,48,0,124,10, - 160,5,100,5,161,1,125,11,116,6,124,11,131,1,100,5, - 107,3,114,132,116,7,100,6,131,1,130,1,124,11,100,0, - 100,7,133,2,25,0,100,8,107,3,114,166,116,0,100,9, - 124,0,155,2,157,2,124,0,100,4,141,2,130,1,116,8, - 124,11,100,10,100,11,133,2,25,0,131,1,125,12,116,8, - 124,11,100,11,100,5,133,2,25,0,131,1,125,13,100,5, - 124,12,23,0,124,13,23,0,125,14,124,6,124,14,55,0, - 125,6,122,14,124,10,160,3,124,6,161,1,1,0,87,0, - 110,38,4,0,116,4,144,1,121,14,1,0,1,0,1,0, - 116,0,100,3,124,0,155,2,157,2,124,0,100,4,141,2, - 130,1,89,0,110,2,48,0,124,10,160,5,124,4,161,1, - 
125,15,116,6,124,15,131,1,124,4,107,3,144,1,114,48, - 116,4,100,12,131,1,130,1,87,0,100,0,4,0,4,0, - 131,3,1,0,110,18,49,0,144,1,115,70,48,0,1,0, - 1,0,1,0,89,0,1,0,124,3,100,1,107,2,144,1, - 114,94,124,15,83,0,122,10,116,9,131,0,125,16,87,0, - 110,28,4,0,116,10,144,1,121,132,1,0,1,0,1,0, - 116,0,100,13,131,1,130,1,89,0,110,2,48,0,124,16, - 124,15,100,14,131,2,83,0,41,15,78,114,0,0,0,0, - 122,18,110,101,103,97,116,105,118,101,32,100,97,116,97,32, - 115,105,122,101,114,92,0,0,0,114,12,0,0,0,114,104, - 0,0,0,114,98,0,0,0,114,93,0,0,0,115,4,0, - 0,0,80,75,3,4,122,23,98,97,100,32,108,111,99,97, - 108,32,102,105,108,101,32,104,101,97,100,101,114,58,32,233, - 26,0,0,0,114,103,0,0,0,122,26,122,105,112,105,109, - 112,111,114,116,58,32,99,97,110,39,116,32,114,101,97,100, - 32,100,97,116,97,114,139,0,0,0,105,241,255,255,255,41, - 11,114,3,0,0,0,114,110,0,0,0,114,111,0,0,0, - 114,112,0,0,0,114,22,0,0,0,114,114,0,0,0,114, - 51,0,0,0,114,119,0,0,0,114,1,0,0,0,114,144, - 0,0,0,114,143,0,0,0,41,17,114,29,0,0,0,114, - 54,0,0,0,90,8,100,97,116,97,112,97,116,104,114,130, - 0,0,0,114,134,0,0,0,114,125,0,0,0,114,137,0, - 0,0,114,131,0,0,0,114,132,0,0,0,114,133,0,0, - 0,114,123,0,0,0,114,124,0,0,0,114,135,0,0,0, - 114,136,0,0,0,114,127,0,0,0,90,8,114,97,119,95, - 100,97,116,97,114,141,0,0,0,114,9,0,0,0,114,9, - 0,0,0,114,10,0,0,0,114,52,0,0,0,19,2,0, - 0,115,62,0,0,0,0,1,20,1,8,1,8,2,14,2, - 2,1,14,1,12,1,24,1,10,1,12,1,8,2,16,2, - 18,2,16,1,16,1,12,1,8,1,2,1,14,1,14,1, - 24,1,10,1,14,1,40,2,10,2,4,3,2,1,10,1, - 14,1,14,1,114,52,0,0,0,99,2,0,0,0,0,0, - 0,0,0,0,0,0,2,0,0,0,3,0,0,0,67,0, - 0,0,115,16,0,0,0,116,0,124,0,124,1,24,0,131, - 1,100,1,107,1,83,0,41,2,78,114,5,0,0,0,41, - 1,218,3,97,98,115,41,2,90,2,116,49,90,2,116,50, + 0,0,218,12,103,101,116,95,102,105,108,101,110,97,109,101, + 184,0,0,0,115,4,0,0,0,0,7,16,1,122,24,122, + 105,112,105,109,112,111,114,116,101,114,46,103,101,116,95,102, + 105,108,101,110,97,109,101,99,2,0,0,0,0,0,0,0, + 0,0,0,0,6,0,0,0,8,0,0,0,67,0,0,0, + 115,126,0,0,0,116,0,124,0,124,1,131,2,125,2,124, + 2,100,1,117,0,114,36,116,1,100,2,124,1,155,2,157, + 2,124,1,100,3,141,2,130,1,116,2,124,0,124,1,131, + 2,125,3,124,2,114,64,116,3,160,4,124,3,100,4,161, + 2,125,4,110,10,124,3,155,0,100,5,157,2,125,4,122, + 14,124,0,106,5,124,4,25,0,125,5,87,0,110,20,4, + 0,116,6,121,108,1,0,1,0,1,0,89,0,100,1,83, + 0,48,0,116,7,124,0,106,8,124,5,131,2,160,9,161, + 0,83,0,41,6,122,253,103,101,116,95,115,111,117,114,99, + 101,40,102,117,108,108,110,97,109,101,41,32,45,62,32,115, + 111,117,114,99,101,32,115,116,114,105,110,103,46,10,10,32, + 32,32,32,32,32,32,32,82,101,116,117,114,110,32,116,104, + 101,32,115,111,117,114,99,101,32,99,111,100,101,32,102,111, + 114,32,116,104,101,32,115,112,101,99,105,102,105,101,100,32, + 109,111,100,117,108,101,46,32,82,97,105,115,101,32,90,105, + 112,73,109,112,111,114,116,69,114,114,111,114,10,32,32,32, + 32,32,32,32,32,105,102,32,116,104,101,32,109,111,100,117, + 108,101,32,99,111,117,108,100,110,39,116,32,98,101,32,102, + 111,117,110,100,44,32,114,101,116,117,114,110,32,78,111,110, + 101,32,105,102,32,116,104,101,32,97,114,99,104,105,118,101, + 32,100,111,101,115,10,32,32,32,32,32,32,32,32,99,111, + 110,116,97,105,110,32,116,104,101,32,109,111,100,117,108,101, + 44,32,98,117,116,32,104,97,115,32,110,111,32,115,111,117, + 114,99,101,32,102,111,114,32,105,116,46,10,32,32,32,32, + 32,32,32,32,78,250,18,99,97,110,39,116,32,102,105,110, + 100,32,109,111,100,117,108,101,32,169,1,218,4,110,97,109, + 101,250,11,95,95,105,110,105,116,95,95,46,112,121,250,3, + 
46,112,121,41,10,114,35,0,0,0,114,3,0,0,0,114, + 36,0,0,0,114,21,0,0,0,114,30,0,0,0,114,28, + 0,0,0,114,26,0,0,0,114,52,0,0,0,114,29,0, + 0,0,218,6,100,101,99,111,100,101,41,6,114,32,0,0, + 0,114,38,0,0,0,114,39,0,0,0,114,13,0,0,0, + 218,8,102,117,108,108,112,97,116,104,114,54,0,0,0,114, + 9,0,0,0,114,9,0,0,0,114,10,0,0,0,218,10, + 103,101,116,95,115,111,117,114,99,101,195,0,0,0,115,24, + 0,0,0,0,7,10,1,8,1,18,2,10,1,4,1,14, + 2,10,2,2,1,14,1,12,2,8,1,122,22,122,105,112, + 105,109,112,111,114,116,101,114,46,103,101,116,95,115,111,117, + 114,99,101,99,2,0,0,0,0,0,0,0,0,0,0,0, + 3,0,0,0,4,0,0,0,67,0,0,0,115,40,0,0, + 0,116,0,124,0,124,1,131,2,125,2,124,2,100,1,117, + 0,114,36,116,1,100,2,124,1,155,2,157,2,124,1,100, + 3,141,2,130,1,124,2,83,0,41,4,122,171,105,115,95, + 112,97,99,107,97,103,101,40,102,117,108,108,110,97,109,101, + 41,32,45,62,32,98,111,111,108,46,10,10,32,32,32,32, + 32,32,32,32,82,101,116,117,114,110,32,84,114,117,101,32, + 105,102,32,116,104,101,32,109,111,100,117,108,101,32,115,112, + 101,99,105,102,105,101,100,32,98,121,32,102,117,108,108,110, + 97,109,101,32,105,115,32,97,32,112,97,99,107,97,103,101, + 46,10,32,32,32,32,32,32,32,32,82,97,105,115,101,32, + 90,105,112,73,109,112,111,114,116,69,114,114,111,114,32,105, + 102,32,116,104,101,32,109,111,100,117,108,101,32,99,111,117, + 108,100,110,39,116,32,98,101,32,102,111,117,110,100,46,10, + 32,32,32,32,32,32,32,32,78,114,57,0,0,0,114,58, + 0,0,0,41,2,114,35,0,0,0,114,3,0,0,0,41, + 3,114,32,0,0,0,114,38,0,0,0,114,39,0,0,0, 114,9,0,0,0,114,9,0,0,0,114,10,0,0,0,218, - 9,95,101,113,95,109,116,105,109,101,65,2,0,0,115,2, - 0,0,0,0,2,114,147,0,0,0,99,5,0,0,0,0, - 0,0,0,0,0,0,0,14,0,0,0,8,0,0,0,67, - 0,0,0,115,56,1,0,0,124,3,124,2,100,1,156,2, - 125,5,122,18,116,0,160,1,124,4,124,3,124,5,161,3, - 125,6,87,0,110,20,4,0,116,2,121,48,1,0,1,0, - 1,0,89,0,100,0,83,0,48,0,124,6,100,2,64,0, - 100,3,107,3,125,7,124,7,114,178,124,6,100,4,64,0, - 100,3,107,3,125,8,116,3,106,4,100,5,107,3,114,176, - 124,8,115,102,116,3,106,4,100,6,107,2,114,176,116,5, - 124,0,124,2,131,2,125,9,124,9,100,0,117,1,114,176, - 116,3,160,6,116,0,106,7,124,9,161,2,125,10,122,20, - 116,0,160,8,124,4,124,10,124,3,124,5,161,4,1,0, - 87,0,110,20,4,0,116,2,121,174,1,0,1,0,1,0, - 89,0,100,0,83,0,48,0,110,84,116,9,124,0,124,2, - 131,2,92,2,125,11,125,12,124,11,144,1,114,6,116,10, - 116,11,124,4,100,7,100,8,133,2,25,0,131,1,124,11, - 131,2,114,242,116,11,124,4,100,8,100,9,133,2,25,0, - 131,1,124,12,107,3,144,1,114,6,116,12,160,13,100,10, - 124,3,155,2,157,2,161,1,1,0,100,0,83,0,116,14, - 160,15,124,4,100,9,100,0,133,2,25,0,161,1,125,13, - 116,16,124,13,116,17,131,2,144,1,115,52,116,18,100,11, - 124,1,155,2,100,12,157,3,131,1,130,1,124,13,83,0, - 41,13,78,41,2,114,59,0,0,0,114,13,0,0,0,114, - 5,0,0,0,114,0,0,0,0,114,86,0,0,0,90,5, - 110,101,118,101,114,90,6,97,108,119,97,121,115,114,99,0, - 0,0,114,94,0,0,0,114,95,0,0,0,122,22,98,121, - 116,101,99,111,100,101,32,105,115,32,115,116,97,108,101,32, - 102,111,114,32,122,16,99,111,109,112,105,108,101,100,32,109, - 111,100,117,108,101,32,122,21,32,105,115,32,110,111,116,32, - 97,32,99,111,100,101,32,111,98,106,101,99,116,41,19,114, - 21,0,0,0,90,13,95,99,108,97,115,115,105,102,121,95, - 112,121,99,114,75,0,0,0,218,4,95,105,109,112,90,21, - 99,104,101,99,107,95,104,97,115,104,95,98,97,115,101,100, - 95,112,121,99,115,218,15,95,103,101,116,95,112,121,99,95, - 115,111,117,114,99,101,218,11,115,111,117,114,99,101,95,104, - 97,115,104,90,17,95,82,65,87,95,77,65,71,73,67,95, - 78,85,77,66,69,82,90,18,95,118,97,108,105,100,97,116, - 
101,95,104,97,115,104,95,112,121,99,218,29,95,103,101,116, - 95,109,116,105,109,101,95,97,110,100,95,115,105,122,101,95, - 111,102,95,115,111,117,114,99,101,114,147,0,0,0,114,2, - 0,0,0,114,76,0,0,0,114,77,0,0,0,218,7,109, - 97,114,115,104,97,108,90,5,108,111,97,100,115,114,15,0, - 0,0,218,10,95,99,111,100,101,95,116,121,112,101,218,9, - 84,121,112,101,69,114,114,111,114,41,14,114,32,0,0,0, - 114,53,0,0,0,114,63,0,0,0,114,38,0,0,0,114, - 126,0,0,0,90,11,101,120,99,95,100,101,116,97,105,108, - 115,114,129,0,0,0,90,10,104,97,115,104,95,98,97,115, - 101,100,90,12,99,104,101,99,107,95,115,111,117,114,99,101, - 90,12,115,111,117,114,99,101,95,98,121,116,101,115,114,150, - 0,0,0,90,12,115,111,117,114,99,101,95,109,116,105,109, - 101,90,11,115,111,117,114,99,101,95,115,105,122,101,114,46, - 0,0,0,114,9,0,0,0,114,9,0,0,0,114,10,0, - 0,0,218,15,95,117,110,109,97,114,115,104,97,108,95,99, - 111,100,101,75,2,0,0,115,82,0,0,0,0,2,2,1, - 2,254,6,5,2,1,18,1,12,1,8,2,12,1,4,1, - 12,1,10,1,2,255,2,1,8,255,2,2,10,1,8,1, - 4,1,4,1,2,254,4,5,2,1,4,1,8,255,8,2, - 12,1,10,3,8,255,6,3,6,3,22,1,18,255,4,2, - 4,1,8,255,4,2,4,2,18,1,12,1,16,1,114,155, - 0,0,0,99,1,0,0,0,0,0,0,0,0,0,0,0, - 1,0,0,0,4,0,0,0,67,0,0,0,115,28,0,0, - 0,124,0,160,0,100,1,100,2,161,2,125,0,124,0,160, - 0,100,3,100,2,161,2,125,0,124,0,83,0,41,4,78, - 115,2,0,0,0,13,10,243,1,0,0,0,10,243,1,0, - 0,0,13,41,1,114,19,0,0,0,41,1,218,6,115,111, - 117,114,99,101,114,9,0,0,0,114,9,0,0,0,114,10, - 0,0,0,218,23,95,110,111,114,109,97,108,105,122,101,95, - 108,105,110,101,95,101,110,100,105,110,103,115,126,2,0,0, - 115,6,0,0,0,0,1,12,1,12,1,114,159,0,0,0, - 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, - 0,6,0,0,0,67,0,0,0,115,24,0,0,0,116,0, - 124,1,131,1,125,1,116,1,124,1,124,0,100,1,100,2, - 100,3,141,4,83,0,41,4,78,114,74,0,0,0,84,41, - 1,90,12,100,111,110,116,95,105,110,104,101,114,105,116,41, - 2,114,159,0,0,0,218,7,99,111,109,112,105,108,101,41, - 2,114,53,0,0,0,114,158,0,0,0,114,9,0,0,0, - 114,9,0,0,0,114,10,0,0,0,218,15,95,99,111,109, - 112,105,108,101,95,115,111,117,114,99,101,133,2,0,0,115, - 4,0,0,0,0,1,8,1,114,161,0,0,0,99,2,0, - 0,0,0,0,0,0,0,0,0,0,2,0,0,0,11,0, - 0,0,67,0,0,0,115,68,0,0,0,116,0,160,1,124, - 0,100,1,63,0,100,2,23,0,124,0,100,3,63,0,100, - 4,64,0,124,0,100,5,64,0,124,1,100,6,63,0,124, - 1,100,3,63,0,100,7,64,0,124,1,100,5,64,0,100, - 8,20,0,100,9,100,9,100,9,102,9,161,1,83,0,41, - 10,78,233,9,0,0,0,105,188,7,0,0,233,5,0,0, - 0,233,15,0,0,0,233,31,0,0,0,233,11,0,0,0, - 233,63,0,0,0,114,86,0,0,0,114,14,0,0,0,41, - 2,114,131,0,0,0,90,6,109,107,116,105,109,101,41,2, - 218,1,100,114,138,0,0,0,114,9,0,0,0,114,9,0, - 0,0,114,10,0,0,0,218,14,95,112,97,114,115,101,95, - 100,111,115,116,105,109,101,139,2,0,0,115,18,0,0,0, - 0,1,4,1,10,1,10,1,6,1,6,1,10,1,10,1, - 6,249,114,169,0,0,0,99,2,0,0,0,0,0,0,0, - 0,0,0,0,6,0,0,0,10,0,0,0,67,0,0,0, - 115,114,0,0,0,122,82,124,1,100,1,100,0,133,2,25, - 0,100,2,118,0,115,22,74,0,130,1,124,1,100,0,100, - 1,133,2,25,0,125,1,124,0,106,0,124,1,25,0,125, - 2,124,2,100,3,25,0,125,3,124,2,100,4,25,0,125, - 4,124,2,100,5,25,0,125,5,116,1,124,4,124,3,131, - 2,124,5,102,2,87,0,83,0,4,0,116,2,116,3,116, - 4,102,3,121,108,1,0,1,0,1,0,89,0,100,6,83, - 0,48,0,100,0,83,0,41,7,78,114,14,0,0,0,169, - 2,218,1,99,218,1,111,114,163,0,0,0,233,6,0,0, - 0,233,3,0,0,0,41,2,114,0,0,0,0,114,0,0, - 0,0,41,5,114,28,0,0,0,114,169,0,0,0,114,26, - 0,0,0,218,10,73,110,100,101,120,69,114,114,111,114,114, - 154,0,0,0,41,6,114,32,0,0,0,114,13,0,0,0, - 114,54,0,0,0,114,131,0,0,0,114,132,0,0,0,90, - 
17,117,110,99,111,109,112,114,101,115,115,101,100,95,115,105, - 122,101,114,9,0,0,0,114,9,0,0,0,114,10,0,0, - 0,114,151,0,0,0,152,2,0,0,115,20,0,0,0,0, - 1,2,2,20,1,12,1,10,3,8,1,8,1,8,1,16, - 1,18,1,114,151,0,0,0,99,2,0,0,0,0,0,0, + 10,105,115,95,112,97,99,107,97,103,101,221,0,0,0,115, + 8,0,0,0,0,6,10,1,8,1,18,1,122,22,122,105, + 112,105,109,112,111,114,116,101,114,46,105,115,95,112,97,99, + 107,97,103,101,99,2,0,0,0,0,0,0,0,0,0,0, + 0,8,0,0,0,8,0,0,0,67,0,0,0,115,246,0, + 0,0,116,0,124,0,124,1,131,2,92,3,125,2,125,3, + 125,4,116,1,106,2,160,3,124,1,161,1,125,5,124,5, + 100,1,117,0,115,46,116,4,124,5,116,5,131,2,115,64, + 116,5,124,1,131,1,125,5,124,5,116,1,106,2,124,1, + 60,0,124,0,124,5,95,6,122,84,124,3,114,108,116,7, + 124,0,124,1,131,2,125,6,116,8,160,9,124,0,106,10, + 124,6,161,2,125,7,124,7,103,1,124,5,95,11,116,12, + 124,5,100,2,131,2,115,124,116,13,124,5,95,13,116,8, + 160,14,124,5,106,15,124,1,124,4,161,3,1,0,116,16, + 124,2,124,5,106,15,131,2,1,0,87,0,110,22,1,0, + 1,0,1,0,116,1,106,2,124,1,61,0,130,0,89,0, + 110,2,48,0,122,14,116,1,106,2,124,1,25,0,125,5, + 87,0,110,34,4,0,116,17,121,226,1,0,1,0,1,0, + 116,18,100,3,124,1,155,2,100,4,157,3,131,1,130,1, + 89,0,110,2,48,0,116,19,160,20,100,5,124,1,124,4, + 161,3,1,0,124,5,83,0,41,6,122,245,108,111,97,100, + 95,109,111,100,117,108,101,40,102,117,108,108,110,97,109,101, + 41,32,45,62,32,109,111,100,117,108,101,46,10,10,32,32, + 32,32,32,32,32,32,76,111,97,100,32,116,104,101,32,109, + 111,100,117,108,101,32,115,112,101,99,105,102,105,101,100,32, + 98,121,32,39,102,117,108,108,110,97,109,101,39,46,32,39, + 102,117,108,108,110,97,109,101,39,32,109,117,115,116,32,98, + 101,32,116,104,101,10,32,32,32,32,32,32,32,32,102,117, + 108,108,121,32,113,117,97,108,105,102,105,101,100,32,40,100, + 111,116,116,101,100,41,32,109,111,100,117,108,101,32,110,97, + 109,101,46,32,73,116,32,114,101,116,117,114,110,115,32,116, + 104,101,32,105,109,112,111,114,116,101,100,10,32,32,32,32, + 32,32,32,32,109,111,100,117,108,101,44,32,111,114,32,114, + 97,105,115,101,115,32,90,105,112,73,109,112,111,114,116,69, + 114,114,111,114,32,105,102,32,105,116,32,119,97,115,110,39, + 116,32,102,111,117,110,100,46,10,32,32,32,32,32,32,32, + 32,78,218,12,95,95,98,117,105,108,116,105,110,115,95,95, + 122,14,76,111,97,100,101,100,32,109,111,100,117,108,101,32, + 122,25,32,110,111,116,32,102,111,117,110,100,32,105,110,32, + 115,121,115,46,109,111,100,117,108,101,115,122,30,105,109,112, + 111,114,116,32,123,125,32,35,32,108,111,97,100,101,100,32, + 102,114,111,109,32,90,105,112,32,123,125,41,21,114,44,0, + 0,0,218,3,115,121,115,218,7,109,111,100,117,108,101,115, + 218,3,103,101,116,114,15,0,0,0,218,12,95,109,111,100, + 117,108,101,95,116,121,112,101,218,10,95,95,108,111,97,100, + 101,114,95,95,114,36,0,0,0,114,21,0,0,0,114,30, + 0,0,0,114,29,0,0,0,90,8,95,95,112,97,116,104, + 95,95,218,7,104,97,115,97,116,116,114,114,66,0,0,0, + 90,14,95,102,105,120,95,117,112,95,109,111,100,117,108,101, + 218,8,95,95,100,105,99,116,95,95,218,4,101,120,101,99, + 114,26,0,0,0,218,11,73,109,112,111,114,116,69,114,114, + 111,114,218,10,95,98,111,111,116,115,116,114,97,112,218,16, + 95,118,101,114,98,111,115,101,95,109,101,115,115,97,103,101, + 41,8,114,32,0,0,0,114,38,0,0,0,114,46,0,0, + 0,114,47,0,0,0,114,40,0,0,0,90,3,109,111,100, + 114,13,0,0,0,114,63,0,0,0,114,9,0,0,0,114, + 9,0,0,0,114,10,0,0,0,218,11,108,111,97,100,95, + 109,111,100,117,108,101,234,0,0,0,115,48,0,0,0,0, + 7,16,1,12,1,18,1,8,1,10,1,6,2,2,1,4, + 3,10,1,14,1,8,2,10,1,6,1,16,1,16,1,6, + 1,8,1,8,2,2,1,14,1,12,1,22,1,14,1,122, 
+ 23,122,105,112,105,109,112,111,114,116,101,114,46,108,111,97, + 100,95,109,111,100,117,108,101,99,2,0,0,0,0,0,0, 0,0,0,0,0,3,0,0,0,8,0,0,0,67,0,0, - 0,115,84,0,0,0,124,1,100,1,100,0,133,2,25,0, - 100,2,118,0,115,20,74,0,130,1,124,1,100,0,100,1, - 133,2,25,0,125,1,122,14,124,0,106,0,124,1,25,0, - 125,2,87,0,110,20,4,0,116,1,121,66,1,0,1,0, - 1,0,89,0,100,0,83,0,48,0,116,2,124,0,106,3, - 124,2,131,2,83,0,100,0,83,0,41,3,78,114,14,0, - 0,0,114,170,0,0,0,41,4,114,28,0,0,0,114,26, - 0,0,0,114,52,0,0,0,114,29,0,0,0,41,3,114, - 32,0,0,0,114,13,0,0,0,114,54,0,0,0,114,9, - 0,0,0,114,9,0,0,0,114,10,0,0,0,114,149,0, - 0,0,171,2,0,0,115,14,0,0,0,0,2,20,1,12, - 2,2,1,14,1,12,1,8,2,114,149,0,0,0,99,2, - 0,0,0,0,0,0,0,0,0,0,0,11,0,0,0,9, - 0,0,0,67,0,0,0,115,196,0,0,0,116,0,124,0, - 124,1,131,2,125,2,116,1,68,0,93,158,92,3,125,3, - 125,4,125,5,124,2,124,3,23,0,125,6,116,2,106,3, - 100,1,124,0,106,4,116,5,124,6,100,2,100,3,141,5, - 1,0,122,14,124,0,106,6,124,6,25,0,125,7,87,0, - 110,18,4,0,116,7,121,86,1,0,1,0,1,0,89,0, - 113,14,48,0,124,7,100,4,25,0,125,8,116,8,124,0, - 106,4,124,7,131,2,125,9,124,4,114,130,116,9,124,0, - 124,8,124,6,124,1,124,9,131,5,125,10,110,10,116,10, - 124,8,124,9,131,2,125,10,124,10,100,0,117,0,114,150, - 113,14,124,7,100,4,25,0,125,8,124,10,124,5,124,8, - 102,3,2,0,1,0,83,0,113,14,116,11,100,5,124,1, - 155,2,157,2,124,1,100,6,141,2,130,1,100,0,83,0, - 41,7,78,122,13,116,114,121,105,110,103,32,123,125,123,125, - 123,125,114,86,0,0,0,41,1,90,9,118,101,114,98,111, - 115,105,116,121,114,0,0,0,0,114,57,0,0,0,114,58, - 0,0,0,41,12,114,36,0,0,0,114,89,0,0,0,114, - 76,0,0,0,114,77,0,0,0,114,29,0,0,0,114,20, - 0,0,0,114,28,0,0,0,114,26,0,0,0,114,52,0, - 0,0,114,155,0,0,0,114,161,0,0,0,114,3,0,0, - 0,41,11,114,32,0,0,0,114,38,0,0,0,114,13,0, - 0,0,114,90,0,0,0,114,91,0,0,0,114,47,0,0, - 0,114,63,0,0,0,114,54,0,0,0,114,40,0,0,0, - 114,126,0,0,0,114,46,0,0,0,114,9,0,0,0,114, - 9,0,0,0,114,10,0,0,0,114,44,0,0,0,186,2, - 0,0,115,36,0,0,0,0,1,10,1,14,1,8,1,22, - 1,2,1,14,1,12,1,6,2,8,1,12,1,4,1,18, - 2,10,1,8,3,2,1,8,1,16,2,114,44,0,0,0, - 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,64,0,0,0,115,60,0,0,0,101,0, - 90,1,100,0,90,2,100,1,90,3,100,2,90,4,100,3, - 100,4,132,0,90,5,100,5,100,6,132,0,90,6,100,7, - 100,8,132,0,90,7,100,9,100,10,132,0,90,8,100,11, - 100,12,132,0,90,9,100,13,83,0,41,14,114,80,0,0, - 0,122,165,80,114,105,118,97,116,101,32,99,108,97,115,115, - 32,117,115,101,100,32,116,111,32,115,117,112,112,111,114,116, - 32,90,105,112,73,109,112,111,114,116,46,103,101,116,95,114, - 101,115,111,117,114,99,101,95,114,101,97,100,101,114,40,41, - 46,10,10,32,32,32,32,84,104,105,115,32,99,108,97,115, - 115,32,105,115,32,97,108,108,111,119,101,100,32,116,111,32, - 114,101,102,101,114,101,110,99,101,32,97,108,108,32,116,104, - 101,32,105,110,110,97,114,100,115,32,97,110,100,32,112,114, - 105,118,97,116,101,32,112,97,114,116,115,32,111,102,10,32, - 32,32,32,116,104,101,32,122,105,112,105,109,112,111,114,116, - 101,114,46,10,32,32,32,32,70,99,3,0,0,0,0,0, + 0,115,64,0,0,0,122,20,124,0,160,0,124,1,161,1, + 115,18,87,0,100,1,83,0,87,0,110,20,4,0,116,1, + 121,40,1,0,1,0,1,0,89,0,100,1,83,0,48,0, + 100,2,100,3,108,2,109,3,125,2,1,0,124,2,124,0, + 124,1,131,2,83,0,41,4,122,204,82,101,116,117,114,110, + 32,116,104,101,32,82,101,115,111,117,114,99,101,82,101,97, + 100,101,114,32,102,111,114,32,97,32,112,97,99,107,97,103, + 101,32,105,110,32,97,32,122,105,112,32,102,105,108,101,46, + 10,10,32,32,32,32,32,32,32,32,73,102,32,39,102,117, + 108,108,110,97,109,101,39,32,105,115,32,97,32,112,97,99, + 
107,97,103,101,32,119,105,116,104,105,110,32,116,104,101,32, + 122,105,112,32,102,105,108,101,44,32,114,101,116,117,114,110, + 32,116,104,101,10,32,32,32,32,32,32,32,32,39,82,101, + 115,111,117,114,99,101,82,101,97,100,101,114,39,32,111,98, + 106,101,99,116,32,102,111,114,32,116,104,101,32,112,97,99, + 107,97,103,101,46,32,32,79,116,104,101,114,119,105,115,101, + 32,114,101,116,117,114,110,32,78,111,110,101,46,10,32,32, + 32,32,32,32,32,32,78,114,0,0,0,0,41,1,218,9, + 90,105,112,82,101,97,100,101,114,41,4,114,65,0,0,0, + 114,3,0,0,0,90,17,105,109,112,111,114,116,108,105,98, + 46,114,101,97,100,101,114,115,114,79,0,0,0,41,3,114, + 32,0,0,0,114,38,0,0,0,114,79,0,0,0,114,9, + 0,0,0,114,9,0,0,0,114,10,0,0,0,218,19,103, + 101,116,95,114,101,115,111,117,114,99,101,95,114,101,97,100, + 101,114,16,1,0,0,115,14,0,0,0,0,6,2,1,10, + 1,10,1,12,1,8,1,12,1,122,31,122,105,112,105,109, + 112,111,114,116,101,114,46,103,101,116,95,114,101,115,111,117, + 114,99,101,95,114,101,97,100,101,114,99,1,0,0,0,0, + 0,0,0,0,0,0,0,1,0,0,0,5,0,0,0,67, + 0,0,0,115,24,0,0,0,100,1,124,0,106,0,155,0, + 116,1,155,0,124,0,106,2,155,0,100,2,157,5,83,0, + 41,3,78,122,21,60,122,105,112,105,109,112,111,114,116,101, + 114,32,111,98,106,101,99,116,32,34,122,2,34,62,41,3, + 114,29,0,0,0,114,20,0,0,0,114,31,0,0,0,41, + 1,114,32,0,0,0,114,9,0,0,0,114,9,0,0,0, + 114,10,0,0,0,218,8,95,95,114,101,112,114,95,95,31, + 1,0,0,115,2,0,0,0,0,1,122,20,122,105,112,105, + 109,112,111,114,116,101,114,46,95,95,114,101,112,114,95,95, + 41,1,78,41,1,78,41,15,114,6,0,0,0,114,7,0, + 0,0,114,8,0,0,0,218,7,95,95,100,111,99,95,95, + 114,34,0,0,0,114,41,0,0,0,114,42,0,0,0,114, + 48,0,0,0,114,55,0,0,0,114,56,0,0,0,114,64, + 0,0,0,114,65,0,0,0,114,78,0,0,0,114,80,0, + 0,0,114,81,0,0,0,114,9,0,0,0,114,9,0,0, + 0,114,9,0,0,0,114,10,0,0,0,114,4,0,0,0, + 45,0,0,0,115,24,0,0,0,8,1,4,17,8,46,10, + 32,10,12,8,10,8,21,8,11,8,26,8,13,8,38,8, + 15,122,12,95,95,105,110,105,116,95,95,46,112,121,99,84, + 114,60,0,0,0,70,41,3,122,4,46,112,121,99,84,70, + 41,3,114,61,0,0,0,70,70,99,2,0,0,0,0,0, + 0,0,0,0,0,0,2,0,0,0,4,0,0,0,67,0, + 0,0,115,20,0,0,0,124,0,106,0,124,1,160,1,100, + 1,161,1,100,2,25,0,23,0,83,0,41,3,78,218,1, + 46,233,2,0,0,0,41,2,114,31,0,0,0,218,10,114, + 112,97,114,116,105,116,105,111,110,41,2,114,32,0,0,0, + 114,38,0,0,0,114,9,0,0,0,114,9,0,0,0,114, + 10,0,0,0,114,36,0,0,0,49,1,0,0,115,2,0, + 0,0,0,1,114,36,0,0,0,99,2,0,0,0,0,0, 0,0,0,0,0,0,3,0,0,0,2,0,0,0,67,0, - 0,0,115,16,0,0,0,124,1,124,0,95,0,124,2,124, - 0,95,1,100,0,83,0,114,88,0,0,0,41,2,114,4, - 0,0,0,114,38,0,0,0,41,3,114,32,0,0,0,114, - 4,0,0,0,114,38,0,0,0,114,9,0,0,0,114,9, - 0,0,0,114,10,0,0,0,114,34,0,0,0,220,2,0, - 0,115,4,0,0,0,0,1,6,1,122,33,95,90,105,112, - 73,109,112,111,114,116,82,101,115,111,117,114,99,101,82,101, - 97,100,101,114,46,95,95,105,110,105,116,95,95,99,2,0, - 0,0,0,0,0,0,0,0,0,0,5,0,0,0,8,0, - 0,0,67,0,0,0,115,90,0,0,0,124,0,106,0,160, - 1,100,1,100,2,161,2,125,2,124,2,155,0,100,2,124, - 1,155,0,157,3,125,3,100,3,100,4,108,2,109,3,125, - 4,1,0,122,18,124,4,124,0,106,4,160,5,124,3,161, - 1,131,1,87,0,83,0,4,0,116,6,121,84,1,0,1, - 0,1,0,116,7,124,3,131,1,130,1,89,0,110,2,48, - 0,100,0,83,0,41,5,78,114,85,0,0,0,114,109,0, - 0,0,114,0,0,0,0,41,1,218,7,66,121,116,101,115, - 73,79,41,8,114,38,0,0,0,114,19,0,0,0,90,2, - 105,111,114,176,0,0,0,114,4,0,0,0,114,55,0,0, - 0,114,22,0,0,0,218,17,70,105,108,101,78,111,116,70, - 111,117,110,100,69,114,114,111,114,41,5,114,32,0,0,0, - 218,8,114,101,115,111,117,114,99,101,218,16,102,117,108,108, - 
110,97,109,101,95,97,115,95,112,97,116,104,114,13,0,0, - 0,114,176,0,0,0,114,9,0,0,0,114,9,0,0,0, - 114,10,0,0,0,218,13,111,112,101,110,95,114,101,115,111, - 117,114,99,101,224,2,0,0,115,14,0,0,0,0,1,14, - 1,14,1,12,1,2,1,18,1,12,1,122,38,95,90,105, - 112,73,109,112,111,114,116,82,101,115,111,117,114,99,101,82, - 101,97,100,101,114,46,111,112,101,110,95,114,101,115,111,117, - 114,99,101,99,2,0,0,0,0,0,0,0,0,0,0,0, - 2,0,0,0,1,0,0,0,67,0,0,0,115,8,0,0, - 0,116,0,130,1,100,0,83,0,114,88,0,0,0,41,1, - 114,177,0,0,0,41,2,114,32,0,0,0,114,178,0,0, + 0,0,115,18,0,0,0,124,1,116,0,23,0,125,2,124, + 2,124,0,106,1,118,0,83,0,169,1,78,41,2,114,20, + 0,0,0,114,28,0,0,0,41,3,114,32,0,0,0,114, + 13,0,0,0,90,7,100,105,114,112,97,116,104,114,9,0, + 0,0,114,9,0,0,0,114,10,0,0,0,114,37,0,0, + 0,53,1,0,0,115,4,0,0,0,0,4,8,2,114,37, + 0,0,0,99,2,0,0,0,0,0,0,0,0,0,0,0, + 7,0,0,0,4,0,0,0,67,0,0,0,115,56,0,0, + 0,116,0,124,0,124,1,131,2,125,2,116,1,68,0,93, + 36,92,3,125,3,125,4,125,5,124,2,124,3,23,0,125, + 6,124,6,124,0,106,2,118,0,114,14,124,5,2,0,1, + 0,83,0,113,14,100,0,83,0,114,86,0,0,0,41,3, + 114,36,0,0,0,218,16,95,122,105,112,95,115,101,97,114, + 99,104,111,114,100,101,114,114,28,0,0,0,41,7,114,32, + 0,0,0,114,38,0,0,0,114,13,0,0,0,218,6,115, + 117,102,102,105,120,218,10,105,115,98,121,116,101,99,111,100, + 101,114,47,0,0,0,114,63,0,0,0,114,9,0,0,0, + 114,9,0,0,0,114,10,0,0,0,114,35,0,0,0,62, + 1,0,0,115,12,0,0,0,0,1,10,1,14,1,8,1, + 10,1,10,1,114,35,0,0,0,99,1,0,0,0,0,0, + 0,0,0,0,0,0,26,0,0,0,9,0,0,0,67,0, + 0,0,115,2,5,0,0,122,14,116,0,160,1,124,0,161, + 1,125,1,87,0,110,36,4,0,116,2,121,50,1,0,1, + 0,1,0,116,3,100,1,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,89,0,110,2,48,0,124,1,144,4,143, + 164,1,0,122,36,124,1,160,4,116,5,11,0,100,3,161, + 2,1,0,124,1,160,6,161,0,125,2,124,1,160,7,116, + 5,161,1,125,3,87,0,110,36,4,0,116,2,121,132,1, + 0,1,0,1,0,116,3,100,4,124,0,155,2,157,2,124, + 0,100,2,141,2,130,1,89,0,110,2,48,0,116,8,124, + 3,131,1,116,5,107,3,114,164,116,3,100,4,124,0,155, + 2,157,2,124,0,100,2,141,2,130,1,124,3,100,0,100, + 5,133,2,25,0,116,9,107,3,144,1,114,170,122,24,124, + 1,160,4,100,6,100,3,161,2,1,0,124,1,160,6,161, + 0,125,4,87,0,110,36,4,0,116,2,121,242,1,0,1, + 0,1,0,116,3,100,4,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,89,0,110,2,48,0,116,10,124,4,116, + 11,24,0,116,5,24,0,100,6,131,2,125,5,122,22,124, + 1,160,4,124,5,161,1,1,0,124,1,160,7,161,0,125, + 6,87,0,110,38,4,0,116,2,144,1,121,66,1,0,1, + 0,1,0,116,3,100,4,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,89,0,110,2,48,0,124,6,160,12,116, + 9,161,1,125,7,124,7,100,6,107,0,144,1,114,106,116, + 3,100,7,124,0,155,2,157,2,124,0,100,2,141,2,130, + 1,124,6,124,7,124,7,116,5,23,0,133,2,25,0,125, + 3,116,8,124,3,131,1,116,5,107,3,144,1,114,154,116, + 3,100,8,124,0,155,2,157,2,124,0,100,2,141,2,130, + 1,124,4,116,8,124,6,131,1,24,0,124,7,23,0,125, + 2,116,13,124,3,100,9,100,10,133,2,25,0,131,1,125, + 8,116,13,124,3,100,10,100,11,133,2,25,0,131,1,125, + 9,124,2,124,8,107,0,144,1,114,230,116,3,100,12,124, + 0,155,2,157,2,124,0,100,2,141,2,130,1,124,2,124, + 9,107,0,144,2,114,2,116,3,100,13,124,0,155,2,157, + 2,124,0,100,2,141,2,130,1,124,2,124,8,56,0,125, + 2,124,2,124,9,24,0,125,10,124,10,100,6,107,0,144, + 2,114,46,116,3,100,14,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,105,0,125,11,100,6,125,12,122,14,124, + 1,160,4,124,2,161,1,1,0,87,0,110,38,4,0,116, + 2,144,2,121,106,1,0,1,0,1,0,116,3,100,4,124, + 0,155,2,157,2,124,0,100,2,141,2,130,1,89,0,110, + 2,48,0,124,1,160,7,100,15,161,1,125,3,116,8,124, + 
3,131,1,100,5,107,0,144,2,114,140,116,14,100,16,131, + 1,130,1,124,3,100,0,100,5,133,2,25,0,100,17,107, + 3,144,2,114,162,144,4,113,208,116,8,124,3,131,1,100, + 15,107,3,144,2,114,184,116,14,100,16,131,1,130,1,116, + 15,124,3,100,18,100,19,133,2,25,0,131,1,125,13,116, + 15,124,3,100,19,100,9,133,2,25,0,131,1,125,14,116, + 15,124,3,100,9,100,20,133,2,25,0,131,1,125,15,116, + 15,124,3,100,20,100,10,133,2,25,0,131,1,125,16,116, + 13,124,3,100,10,100,11,133,2,25,0,131,1,125,17,116, + 13,124,3,100,11,100,21,133,2,25,0,131,1,125,18,116, + 13,124,3,100,21,100,22,133,2,25,0,131,1,125,4,116, + 15,124,3,100,22,100,23,133,2,25,0,131,1,125,19,116, + 15,124,3,100,23,100,24,133,2,25,0,131,1,125,20,116, + 15,124,3,100,24,100,25,133,2,25,0,131,1,125,21,116, + 13,124,3,100,26,100,15,133,2,25,0,131,1,125,22,124, + 19,124,20,23,0,124,21,23,0,125,8,124,22,124,9,107, + 4,144,3,114,144,116,3,100,27,124,0,155,2,157,2,124, + 0,100,2,141,2,130,1,124,22,124,10,55,0,125,22,122, + 14,124,1,160,7,124,19,161,1,125,23,87,0,110,38,4, + 0,116,2,144,3,121,204,1,0,1,0,1,0,116,3,100, + 4,124,0,155,2,157,2,124,0,100,2,141,2,130,1,89, + 0,110,2,48,0,116,8,124,23,131,1,124,19,107,3,144, + 3,114,238,116,3,100,4,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,122,50,116,8,124,1,160,7,124,8,124, + 19,24,0,161,1,131,1,124,8,124,19,24,0,107,3,144, + 4,114,30,116,3,100,4,124,0,155,2,157,2,124,0,100, + 2,141,2,130,1,87,0,110,38,4,0,116,2,144,4,121, + 70,1,0,1,0,1,0,116,3,100,4,124,0,155,2,157, + 2,124,0,100,2,141,2,130,1,89,0,110,2,48,0,124, + 13,100,28,64,0,144,4,114,92,124,23,160,16,161,0,125, + 23,110,52,122,14,124,23,160,16,100,29,161,1,125,23,87, + 0,110,36,4,0,116,17,144,4,121,142,1,0,1,0,1, + 0,124,23,160,16,100,30,161,1,160,18,116,19,161,1,125, + 23,89,0,110,2,48,0,124,23,160,20,100,31,116,21,161, + 2,125,23,116,22,160,23,124,0,124,23,161,2,125,24,124, + 24,124,14,124,18,124,4,124,22,124,15,124,16,124,17,102, + 8,125,25,124,25,124,11,124,23,60,0,124,12,100,32,55, + 0,125,12,144,2,113,108,87,0,100,0,4,0,4,0,131, + 3,1,0,110,18,49,0,144,4,115,230,48,0,1,0,1, + 0,1,0,89,0,1,0,116,24,160,25,100,33,124,12,124, + 0,161,3,1,0,124,11,83,0,41,34,78,122,21,99,97, + 110,39,116,32,111,112,101,110,32,90,105,112,32,102,105,108, + 101,58,32,114,12,0,0,0,114,84,0,0,0,250,21,99, + 97,110,39,116,32,114,101,97,100,32,90,105,112,32,102,105, + 108,101,58,32,233,4,0,0,0,114,0,0,0,0,122,16, + 110,111,116,32,97,32,90,105,112,32,102,105,108,101,58,32, + 122,18,99,111,114,114,117,112,116,32,90,105,112,32,102,105, + 108,101,58,32,233,12,0,0,0,233,16,0,0,0,233,20, + 0,0,0,122,28,98,97,100,32,99,101,110,116,114,97,108, + 32,100,105,114,101,99,116,111,114,121,32,115,105,122,101,58, + 32,122,30,98,97,100,32,99,101,110,116,114,97,108,32,100, + 105,114,101,99,116,111,114,121,32,111,102,102,115,101,116,58, + 32,122,38,98,97,100,32,99,101,110,116,114,97,108,32,100, + 105,114,101,99,116,111,114,121,32,115,105,122,101,32,111,114, + 32,111,102,102,115,101,116,58,32,233,46,0,0,0,250,27, + 69,79,70,32,114,101,97,100,32,119,104,101,114,101,32,110, + 111,116,32,101,120,112,101,99,116,101,100,115,4,0,0,0, + 80,75,1,2,233,8,0,0,0,233,10,0,0,0,233,14, + 0,0,0,233,24,0,0,0,233,28,0,0,0,233,30,0, + 0,0,233,32,0,0,0,233,34,0,0,0,233,42,0,0, + 0,122,25,98,97,100,32,108,111,99,97,108,32,104,101,97, + 100,101,114,32,111,102,102,115,101,116,58,32,105,0,8,0, + 0,218,5,97,115,99,105,105,90,6,108,97,116,105,110,49, + 250,1,47,114,5,0,0,0,122,33,122,105,112,105,109,112, + 111,114,116,58,32,102,111,117,110,100,32,123,125,32,110,97, + 
109,101,115,32,105,110,32,123,33,114,125,41,26,218,3,95, + 105,111,218,9,111,112,101,110,95,99,111,100,101,114,22,0, + 0,0,114,3,0,0,0,218,4,115,101,101,107,218,20,69, + 78,68,95,67,69,78,84,82,65,76,95,68,73,82,95,83, + 73,90,69,90,4,116,101,108,108,218,4,114,101,97,100,114, + 51,0,0,0,218,18,83,84,82,73,78,71,95,69,78,68, + 95,65,82,67,72,73,86,69,218,3,109,97,120,218,15,77, + 65,88,95,67,79,77,77,69,78,84,95,76,69,78,218,5, + 114,102,105,110,100,114,2,0,0,0,218,8,69,79,70,69, + 114,114,111,114,114,1,0,0,0,114,62,0,0,0,218,18, + 85,110,105,99,111,100,101,68,101,99,111,100,101,69,114,114, + 111,114,218,9,116,114,97,110,115,108,97,116,101,218,11,99, + 112,52,51,55,95,116,97,98,108,101,114,19,0,0,0,114, + 20,0,0,0,114,21,0,0,0,114,30,0,0,0,114,76, + 0,0,0,114,77,0,0,0,41,26,114,29,0,0,0,218, + 2,102,112,90,15,104,101,97,100,101,114,95,112,111,115,105, + 116,105,111,110,218,6,98,117,102,102,101,114,218,9,102,105, + 108,101,95,115,105,122,101,90,17,109,97,120,95,99,111,109, + 109,101,110,116,95,115,116,97,114,116,218,4,100,97,116,97, + 90,3,112,111,115,218,11,104,101,97,100,101,114,95,115,105, + 122,101,90,13,104,101,97,100,101,114,95,111,102,102,115,101, + 116,90,10,97,114,99,95,111,102,102,115,101,116,114,33,0, + 0,0,218,5,99,111,117,110,116,218,5,102,108,97,103,115, + 218,8,99,111,109,112,114,101,115,115,218,4,116,105,109,101, + 218,4,100,97,116,101,218,3,99,114,99,218,9,100,97,116, + 97,95,115,105,122,101,218,9,110,97,109,101,95,115,105,122, + 101,218,10,101,120,116,114,97,95,115,105,122,101,90,12,99, + 111,109,109,101,110,116,95,115,105,122,101,218,11,102,105,108, + 101,95,111,102,102,115,101,116,114,59,0,0,0,114,13,0, + 0,0,218,1,116,114,9,0,0,0,114,9,0,0,0,114, + 10,0,0,0,114,27,0,0,0,93,1,0,0,115,212,0, + 0,0,0,1,2,1,14,1,12,1,24,2,8,1,2,1, + 14,1,8,1,14,1,12,1,24,1,12,1,18,1,18,3, + 2,1,12,1,12,1,12,1,10,1,2,255,12,2,8,1, + 2,255,2,1,2,255,4,2,2,1,10,1,12,1,14,1, + 10,1,2,255,12,2,10,1,10,1,10,1,2,255,6,2, + 16,1,14,1,10,1,2,255,6,2,16,2,16,1,16,1, + 10,1,18,1,10,1,18,1,8,1,8,1,10,1,18,2, + 4,2,4,1,2,1,14,1,14,1,24,2,10,1,14,1, + 8,2,18,1,4,1,14,1,8,1,16,1,16,1,16,1, + 16,1,16,1,16,1,16,1,16,1,16,1,16,1,16,1, + 12,1,10,1,18,1,8,2,2,1,14,1,14,1,24,1, + 14,1,18,4,2,1,28,1,22,1,14,1,24,2,10,2, + 10,3,2,1,14,1,14,1,22,2,12,1,12,1,20,1, + 8,1,44,1,14,1,114,27,0,0,0,117,190,1,0,0, + 0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15, + 16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31, + 32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47, + 48,49,50,51,52,53,54,55,56,57,58,59,60,61,62,63, + 64,65,66,67,68,69,70,71,72,73,74,75,76,77,78,79, + 80,81,82,83,84,85,86,87,88,89,90,91,92,93,94,95, + 96,97,98,99,100,101,102,103,104,105,106,107,108,109,110,111, + 112,113,114,115,116,117,118,119,120,121,122,123,124,125,126,127, + 195,135,195,188,195,169,195,162,195,164,195,160,195,165,195,167, + 195,170,195,171,195,168,195,175,195,174,195,172,195,132,195,133, + 195,137,195,166,195,134,195,180,195,182,195,178,195,187,195,185, + 195,191,195,150,195,156,194,162,194,163,194,165,226,130,167,198, + 146,195,161,195,173,195,179,195,186,195,177,195,145,194,170,194, + 186,194,191,226,140,144,194,172,194,189,194,188,194,161,194,171, + 194,187,226,150,145,226,150,146,226,150,147,226,148,130,226,148, + 164,226,149,161,226,149,162,226,149,150,226,149,149,226,149,163, + 226,149,145,226,149,151,226,149,157,226,149,156,226,149,155,226, + 148,144,226,148,148,226,148,180,226,148,172,226,148,156,226,148, + 128,226,148,188,226,149,158,226,149,159,226,149,154,226,149,148, + 226,149,169,226,149,166,226,149,160,226,149,144,226,149,172,226, + 
149,167,226,149,168,226,149,164,226,149,165,226,149,153,226,149, + 152,226,149,146,226,149,147,226,149,171,226,149,170,226,148,152, + 226,148,140,226,150,136,226,150,132,226,150,140,226,150,144,226, + 150,128,206,177,195,159,206,147,207,128,206,163,207,131,194,181, + 207,132,206,166,206,152,206,169,206,180,226,136,158,207,134,206, + 181,226,136,169,226,137,161,194,177,226,137,165,226,137,164,226, + 140,160,226,140,161,195,183,226,137,136,194,176,226,136,153,194, + 183,226,136,154,226,129,191,194,178,226,150,160,194,160,99,0, + 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,8, + 0,0,0,67,0,0,0,115,110,0,0,0,116,0,114,22, + 116,1,160,2,100,1,161,1,1,0,116,3,100,2,131,1, + 130,1,100,3,97,0,122,62,122,16,100,4,100,5,108,4, + 109,5,125,0,1,0,87,0,110,36,4,0,116,6,121,80, + 1,0,1,0,1,0,116,1,160,2,100,1,161,1,1,0, + 116,3,100,2,131,1,130,1,89,0,110,2,48,0,87,0, + 100,6,97,0,110,6,100,6,97,0,48,0,116,1,160,2, + 100,7,161,1,1,0,124,0,83,0,41,8,78,122,27,122, + 105,112,105,109,112,111,114,116,58,32,122,108,105,98,32,85, + 78,65,86,65,73,76,65,66,76,69,250,41,99,97,110,39, + 116,32,100,101,99,111,109,112,114,101,115,115,32,100,97,116, + 97,59,32,122,108,105,98,32,110,111,116,32,97,118,97,105, + 108,97,98,108,101,84,114,0,0,0,0,169,1,218,10,100, + 101,99,111,109,112,114,101,115,115,70,122,25,122,105,112,105, + 109,112,111,114,116,58,32,122,108,105,98,32,97,118,97,105, + 108,97,98,108,101,41,7,218,15,95,105,109,112,111,114,116, + 105,110,103,95,122,108,105,98,114,76,0,0,0,114,77,0, + 0,0,114,3,0,0,0,90,4,122,108,105,98,114,139,0, + 0,0,218,9,69,120,99,101,112,116,105,111,110,114,138,0, + 0,0,114,9,0,0,0,114,9,0,0,0,114,10,0,0, + 0,218,20,95,103,101,116,95,100,101,99,111,109,112,114,101, + 115,115,95,102,117,110,99,251,1,0,0,115,24,0,0,0, + 0,2,4,3,10,1,8,2,4,1,4,1,16,1,12,1, + 10,1,16,2,12,2,10,1,114,142,0,0,0,99,2,0, + 0,0,0,0,0,0,0,0,0,0,17,0,0,0,9,0, + 0,0,67,0,0,0,115,144,1,0,0,124,1,92,8,125, + 2,125,3,125,4,125,5,125,6,125,7,125,8,125,9,124, + 4,100,1,107,0,114,36,116,0,100,2,131,1,130,1,116, + 1,160,2,124,0,161,1,144,1,143,14,125,10,122,14,124, + 10,160,3,124,6,161,1,1,0,87,0,110,36,4,0,116, + 4,121,100,1,0,1,0,1,0,116,0,100,3,124,0,155, + 2,157,2,124,0,100,4,141,2,130,1,89,0,110,2,48, + 0,124,10,160,5,100,5,161,1,125,11,116,6,124,11,131, + 1,100,5,107,3,114,132,116,7,100,6,131,1,130,1,124, + 11,100,0,100,7,133,2,25,0,100,8,107,3,114,166,116, + 0,100,9,124,0,155,2,157,2,124,0,100,4,141,2,130, + 1,116,8,124,11,100,10,100,11,133,2,25,0,131,1,125, + 12,116,8,124,11,100,11,100,5,133,2,25,0,131,1,125, + 13,100,5,124,12,23,0,124,13,23,0,125,14,124,6,124, + 14,55,0,125,6,122,14,124,10,160,3,124,6,161,1,1, + 0,87,0,110,38,4,0,116,4,144,1,121,14,1,0,1, + 0,1,0,116,0,100,3,124,0,155,2,157,2,124,0,100, + 4,141,2,130,1,89,0,110,2,48,0,124,10,160,5,124, + 4,161,1,125,15,116,6,124,15,131,1,124,4,107,3,144, + 1,114,48,116,4,100,12,131,1,130,1,87,0,100,0,4, + 0,4,0,131,3,1,0,110,18,49,0,144,1,115,70,48, + 0,1,0,1,0,1,0,89,0,1,0,124,3,100,1,107, + 2,144,1,114,94,124,15,83,0,122,10,116,9,131,0,125, + 16,87,0,110,28,4,0,116,10,144,1,121,132,1,0,1, + 0,1,0,116,0,100,13,131,1,130,1,89,0,110,2,48, + 0,124,16,124,15,100,14,131,2,83,0,41,15,78,114,0, + 0,0,0,122,18,110,101,103,97,116,105,118,101,32,100,97, + 116,97,32,115,105,122,101,114,90,0,0,0,114,12,0,0, + 0,114,102,0,0,0,114,96,0,0,0,114,91,0,0,0, + 115,4,0,0,0,80,75,3,4,122,23,98,97,100,32,108, + 111,99,97,108,32,102,105,108,101,32,104,101,97,100,101,114, + 58,32,233,26,0,0,0,114,101,0,0,0,122,26,122,105, + 112,105,109,112,111,114,116,58,32,99,97,110,39,116,32,114, + 
101,97,100,32,100,97,116,97,114,137,0,0,0,105,241,255, + 255,255,41,11,114,3,0,0,0,114,108,0,0,0,114,109, + 0,0,0,114,110,0,0,0,114,22,0,0,0,114,112,0, + 0,0,114,51,0,0,0,114,117,0,0,0,114,1,0,0, + 0,114,142,0,0,0,114,141,0,0,0,41,17,114,29,0, + 0,0,114,54,0,0,0,90,8,100,97,116,97,112,97,116, + 104,114,128,0,0,0,114,132,0,0,0,114,123,0,0,0, + 114,135,0,0,0,114,129,0,0,0,114,130,0,0,0,114, + 131,0,0,0,114,121,0,0,0,114,122,0,0,0,114,133, + 0,0,0,114,134,0,0,0,114,125,0,0,0,90,8,114, + 97,119,95,100,97,116,97,114,139,0,0,0,114,9,0,0, + 0,114,9,0,0,0,114,10,0,0,0,114,52,0,0,0, + 16,2,0,0,115,62,0,0,0,0,1,20,1,8,1,8, + 2,14,2,2,1,14,1,12,1,24,1,10,1,12,1,8, + 2,16,2,18,2,16,1,16,1,12,1,8,1,2,1,14, + 1,14,1,24,1,10,1,14,1,40,2,10,2,4,3,2, + 1,10,1,14,1,14,1,114,52,0,0,0,99,2,0,0, + 0,0,0,0,0,0,0,0,0,2,0,0,0,3,0,0, + 0,67,0,0,0,115,16,0,0,0,116,0,124,0,124,1, + 24,0,131,1,100,1,107,1,83,0,41,2,78,114,5,0, + 0,0,41,1,218,3,97,98,115,41,2,90,2,116,49,90, + 2,116,50,114,9,0,0,0,114,9,0,0,0,114,10,0, + 0,0,218,9,95,101,113,95,109,116,105,109,101,62,2,0, + 0,115,2,0,0,0,0,2,114,145,0,0,0,99,5,0, + 0,0,0,0,0,0,0,0,0,0,14,0,0,0,8,0, + 0,0,67,0,0,0,115,56,1,0,0,124,3,124,2,100, + 1,156,2,125,5,122,18,116,0,160,1,124,4,124,3,124, + 5,161,3,125,6,87,0,110,20,4,0,116,2,121,48,1, + 0,1,0,1,0,89,0,100,0,83,0,48,0,124,6,100, + 2,64,0,100,3,107,3,125,7,124,7,114,178,124,6,100, + 4,64,0,100,3,107,3,125,8,116,3,106,4,100,5,107, + 3,114,176,124,8,115,102,116,3,106,4,100,6,107,2,114, + 176,116,5,124,0,124,2,131,2,125,9,124,9,100,0,117, + 1,114,176,116,3,160,6,116,0,106,7,124,9,161,2,125, + 10,122,20,116,0,160,8,124,4,124,10,124,3,124,5,161, + 4,1,0,87,0,110,20,4,0,116,2,121,174,1,0,1, + 0,1,0,89,0,100,0,83,0,48,0,110,84,116,9,124, + 0,124,2,131,2,92,2,125,11,125,12,124,11,144,1,114, + 6,116,10,116,11,124,4,100,7,100,8,133,2,25,0,131, + 1,124,11,131,2,114,242,116,11,124,4,100,8,100,9,133, + 2,25,0,131,1,124,12,107,3,144,1,114,6,116,12,160, + 13,100,10,124,3,155,2,157,2,161,1,1,0,100,0,83, + 0,116,14,160,15,124,4,100,9,100,0,133,2,25,0,161, + 1,125,13,116,16,124,13,116,17,131,2,144,1,115,52,116, + 18,100,11,124,1,155,2,100,12,157,3,131,1,130,1,124, + 13,83,0,41,13,78,41,2,114,59,0,0,0,114,13,0, + 0,0,114,5,0,0,0,114,0,0,0,0,114,84,0,0, + 0,90,5,110,101,118,101,114,90,6,97,108,119,97,121,115, + 114,97,0,0,0,114,92,0,0,0,114,93,0,0,0,122, + 22,98,121,116,101,99,111,100,101,32,105,115,32,115,116,97, + 108,101,32,102,111,114,32,122,16,99,111,109,112,105,108,101, + 100,32,109,111,100,117,108,101,32,122,21,32,105,115,32,110, + 111,116,32,97,32,99,111,100,101,32,111,98,106,101,99,116, + 41,19,114,21,0,0,0,90,13,95,99,108,97,115,115,105, + 102,121,95,112,121,99,114,75,0,0,0,218,4,95,105,109, + 112,90,21,99,104,101,99,107,95,104,97,115,104,95,98,97, + 115,101,100,95,112,121,99,115,218,15,95,103,101,116,95,112, + 121,99,95,115,111,117,114,99,101,218,11,115,111,117,114,99, + 101,95,104,97,115,104,90,17,95,82,65,87,95,77,65,71, + 73,67,95,78,85,77,66,69,82,90,18,95,118,97,108,105, + 100,97,116,101,95,104,97,115,104,95,112,121,99,218,29,95, + 103,101,116,95,109,116,105,109,101,95,97,110,100,95,115,105, + 122,101,95,111,102,95,115,111,117,114,99,101,114,145,0,0, + 0,114,2,0,0,0,114,76,0,0,0,114,77,0,0,0, + 218,7,109,97,114,115,104,97,108,90,5,108,111,97,100,115, + 114,15,0,0,0,218,10,95,99,111,100,101,95,116,121,112, + 101,218,9,84,121,112,101,69,114,114,111,114,41,14,114,32, + 0,0,0,114,53,0,0,0,114,63,0,0,0,114,38,0, + 0,0,114,124,0,0,0,90,11,101,120,99,95,100,101,116, + 97,105,108,115,114,127,0,0,0,90,10,104,97,115,104,95, + 
98,97,115,101,100,90,12,99,104,101,99,107,95,115,111,117, + 114,99,101,90,12,115,111,117,114,99,101,95,98,121,116,101, + 115,114,148,0,0,0,90,12,115,111,117,114,99,101,95,109, + 116,105,109,101,90,11,115,111,117,114,99,101,95,115,105,122, + 101,114,46,0,0,0,114,9,0,0,0,114,9,0,0,0, + 114,10,0,0,0,218,15,95,117,110,109,97,114,115,104,97, + 108,95,99,111,100,101,72,2,0,0,115,82,0,0,0,0, + 2,2,1,2,254,6,5,2,1,18,1,12,1,8,2,12, + 1,4,1,12,1,10,1,2,255,2,1,8,255,2,2,10, + 1,8,1,4,1,4,1,2,254,4,5,2,1,4,1,8, + 255,8,2,12,1,10,3,8,255,6,3,6,3,22,1,18, + 255,4,2,4,1,8,255,4,2,4,2,18,1,12,1,16, + 1,114,153,0,0,0,99,1,0,0,0,0,0,0,0,0, + 0,0,0,1,0,0,0,4,0,0,0,67,0,0,0,115, + 28,0,0,0,124,0,160,0,100,1,100,2,161,2,125,0, + 124,0,160,0,100,3,100,2,161,2,125,0,124,0,83,0, + 41,4,78,115,2,0,0,0,13,10,243,1,0,0,0,10, + 243,1,0,0,0,13,41,1,114,19,0,0,0,41,1,218, + 6,115,111,117,114,99,101,114,9,0,0,0,114,9,0,0, + 0,114,10,0,0,0,218,23,95,110,111,114,109,97,108,105, + 122,101,95,108,105,110,101,95,101,110,100,105,110,103,115,123, + 2,0,0,115,6,0,0,0,0,1,12,1,12,1,114,157, + 0,0,0,99,2,0,0,0,0,0,0,0,0,0,0,0, + 2,0,0,0,6,0,0,0,67,0,0,0,115,24,0,0, + 0,116,0,124,1,131,1,125,1,116,1,124,1,124,0,100, + 1,100,2,100,3,141,4,83,0,41,4,78,114,74,0,0, + 0,84,41,1,90,12,100,111,110,116,95,105,110,104,101,114, + 105,116,41,2,114,157,0,0,0,218,7,99,111,109,112,105, + 108,101,41,2,114,53,0,0,0,114,156,0,0,0,114,9, + 0,0,0,114,9,0,0,0,114,10,0,0,0,218,15,95, + 99,111,109,112,105,108,101,95,115,111,117,114,99,101,130,2, + 0,0,115,4,0,0,0,0,1,8,1,114,159,0,0,0, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,11,0,0,0,67,0,0,0,115,68,0,0,0,116,0, + 160,1,124,0,100,1,63,0,100,2,23,0,124,0,100,3, + 63,0,100,4,64,0,124,0,100,5,64,0,124,1,100,6, + 63,0,124,1,100,3,63,0,100,7,64,0,124,1,100,5, + 64,0,100,8,20,0,100,9,100,9,100,9,102,9,161,1, + 83,0,41,10,78,233,9,0,0,0,105,188,7,0,0,233, + 5,0,0,0,233,15,0,0,0,233,31,0,0,0,233,11, + 0,0,0,233,63,0,0,0,114,84,0,0,0,114,14,0, + 0,0,41,2,114,129,0,0,0,90,6,109,107,116,105,109, + 101,41,2,218,1,100,114,136,0,0,0,114,9,0,0,0, + 114,9,0,0,0,114,10,0,0,0,218,14,95,112,97,114, + 115,101,95,100,111,115,116,105,109,101,136,2,0,0,115,18, + 0,0,0,0,1,4,1,10,1,10,1,6,1,6,1,10, + 1,10,1,6,249,114,167,0,0,0,99,2,0,0,0,0, + 0,0,0,0,0,0,0,6,0,0,0,10,0,0,0,67, + 0,0,0,115,114,0,0,0,122,82,124,1,100,1,100,0, + 133,2,25,0,100,2,118,0,115,22,74,0,130,1,124,1, + 100,0,100,1,133,2,25,0,125,1,124,0,106,0,124,1, + 25,0,125,2,124,2,100,3,25,0,125,3,124,2,100,4, + 25,0,125,4,124,2,100,5,25,0,125,5,116,1,124,4, + 124,3,131,2,124,5,102,2,87,0,83,0,4,0,116,2, + 116,3,116,4,102,3,121,108,1,0,1,0,1,0,89,0, + 100,6,83,0,48,0,100,0,83,0,41,7,78,114,14,0, + 0,0,169,2,218,1,99,218,1,111,114,161,0,0,0,233, + 6,0,0,0,233,3,0,0,0,41,2,114,0,0,0,0, + 114,0,0,0,0,41,5,114,28,0,0,0,114,167,0,0, + 0,114,26,0,0,0,218,10,73,110,100,101,120,69,114,114, + 111,114,114,152,0,0,0,41,6,114,32,0,0,0,114,13, + 0,0,0,114,54,0,0,0,114,129,0,0,0,114,130,0, + 0,0,90,17,117,110,99,111,109,112,114,101,115,115,101,100, + 95,115,105,122,101,114,9,0,0,0,114,9,0,0,0,114, + 10,0,0,0,114,149,0,0,0,149,2,0,0,115,20,0, + 0,0,0,1,2,2,20,1,12,1,10,3,8,1,8,1, + 8,1,16,1,18,1,114,149,0,0,0,99,2,0,0,0, + 0,0,0,0,0,0,0,0,3,0,0,0,8,0,0,0, + 67,0,0,0,115,84,0,0,0,124,1,100,1,100,0,133, + 2,25,0,100,2,118,0,115,20,74,0,130,1,124,1,100, + 0,100,1,133,2,25,0,125,1,122,14,124,0,106,0,124, + 1,25,0,125,2,87,0,110,20,4,0,116,1,121,66,1, + 0,1,0,1,0,89,0,100,0,83,0,48,0,116,2,124, + 0,106,3,124,2,131,2,83,0,100,0,83,0,41,3,78, + 
114,14,0,0,0,114,168,0,0,0,41,4,114,28,0,0, + 0,114,26,0,0,0,114,52,0,0,0,114,29,0,0,0, + 41,3,114,32,0,0,0,114,13,0,0,0,114,54,0,0, + 0,114,9,0,0,0,114,9,0,0,0,114,10,0,0,0, + 114,147,0,0,0,168,2,0,0,115,14,0,0,0,0,2, + 20,1,12,2,2,1,14,1,12,1,8,2,114,147,0,0, + 0,99,2,0,0,0,0,0,0,0,0,0,0,0,11,0, + 0,0,9,0,0,0,67,0,0,0,115,196,0,0,0,116, + 0,124,0,124,1,131,2,125,2,116,1,68,0,93,158,92, + 3,125,3,125,4,125,5,124,2,124,3,23,0,125,6,116, + 2,106,3,100,1,124,0,106,4,116,5,124,6,100,2,100, + 3,141,5,1,0,122,14,124,0,106,6,124,6,25,0,125, + 7,87,0,110,18,4,0,116,7,121,86,1,0,1,0,1, + 0,89,0,113,14,48,0,124,7,100,4,25,0,125,8,116, + 8,124,0,106,4,124,7,131,2,125,9,124,4,114,130,116, + 9,124,0,124,8,124,6,124,1,124,9,131,5,125,10,110, + 10,116,10,124,8,124,9,131,2,125,10,124,10,100,0,117, + 0,114,150,113,14,124,7,100,4,25,0,125,8,124,10,124, + 5,124,8,102,3,2,0,1,0,83,0,113,14,116,11,100, + 5,124,1,155,2,157,2,124,1,100,6,141,2,130,1,100, + 0,83,0,41,7,78,122,13,116,114,121,105,110,103,32,123, + 125,123,125,123,125,114,84,0,0,0,41,1,90,9,118,101, + 114,98,111,115,105,116,121,114,0,0,0,0,114,57,0,0, + 0,114,58,0,0,0,41,12,114,36,0,0,0,114,87,0, + 0,0,114,76,0,0,0,114,77,0,0,0,114,29,0,0, + 0,114,20,0,0,0,114,28,0,0,0,114,26,0,0,0, + 114,52,0,0,0,114,153,0,0,0,114,159,0,0,0,114, + 3,0,0,0,41,11,114,32,0,0,0,114,38,0,0,0, + 114,13,0,0,0,114,88,0,0,0,114,89,0,0,0,114, + 47,0,0,0,114,63,0,0,0,114,54,0,0,0,114,40, + 0,0,0,114,124,0,0,0,114,46,0,0,0,114,9,0, + 0,0,114,9,0,0,0,114,10,0,0,0,114,44,0,0, + 0,183,2,0,0,115,36,0,0,0,0,1,10,1,14,1, + 8,1,22,1,2,1,14,1,12,1,6,2,8,1,12,1, + 4,1,18,2,10,1,8,3,2,1,8,1,16,2,114,44, + 0,0,0,41,44,114,82,0,0,0,90,26,95,102,114,111, + 122,101,110,95,105,109,112,111,114,116,108,105,98,95,101,120, + 116,101,114,110,97,108,114,21,0,0,0,114,1,0,0,0, + 114,2,0,0,0,90,17,95,102,114,111,122,101,110,95,105, + 109,112,111,114,116,108,105,98,114,76,0,0,0,114,146,0, + 0,0,114,108,0,0,0,114,150,0,0,0,114,67,0,0, + 0,114,129,0,0,0,90,7,95,95,97,108,108,95,95,114, + 20,0,0,0,90,15,112,97,116,104,95,115,101,112,97,114, + 97,116,111,114,115,114,18,0,0,0,114,75,0,0,0,114, + 3,0,0,0,114,25,0,0,0,218,4,116,121,112,101,114, + 70,0,0,0,114,111,0,0,0,114,113,0,0,0,114,115, + 0,0,0,114,4,0,0,0,114,87,0,0,0,114,36,0, + 0,0,114,37,0,0,0,114,35,0,0,0,114,27,0,0, + 0,114,120,0,0,0,114,140,0,0,0,114,142,0,0,0, + 114,52,0,0,0,114,145,0,0,0,114,153,0,0,0,218, + 8,95,95,99,111,100,101,95,95,114,151,0,0,0,114,157, + 0,0,0,114,159,0,0,0,114,167,0,0,0,114,149,0, + 0,0,114,147,0,0,0,114,44,0,0,0,114,9,0,0, 0,114,9,0,0,0,114,9,0,0,0,114,10,0,0,0, - 218,13,114,101,115,111,117,114,99,101,95,112,97,116,104,233, - 2,0,0,115,2,0,0,0,0,4,122,38,95,90,105,112, - 73,109,112,111,114,116,82,101,115,111,117,114,99,101,82,101, - 97,100,101,114,46,114,101,115,111,117,114,99,101,95,112,97, - 116,104,99,2,0,0,0,0,0,0,0,0,0,0,0,4, - 0,0,0,8,0,0,0,67,0,0,0,115,70,0,0,0, - 124,0,106,0,160,1,100,1,100,2,161,2,125,2,124,2, - 155,0,100,2,124,1,155,0,157,3,125,3,122,16,124,0, - 106,2,160,3,124,3,161,1,1,0,87,0,110,20,4,0, - 116,4,121,64,1,0,1,0,1,0,89,0,100,3,83,0, - 48,0,100,4,83,0,41,5,78,114,85,0,0,0,114,109, - 0,0,0,70,84,41,5,114,38,0,0,0,114,19,0,0, - 0,114,4,0,0,0,114,55,0,0,0,114,22,0,0,0, - 41,4,114,32,0,0,0,114,59,0,0,0,114,179,0,0, - 0,114,13,0,0,0,114,9,0,0,0,114,9,0,0,0, - 114,10,0,0,0,218,11,105,115,95,114,101,115,111,117,114, - 99,101,239,2,0,0,115,14,0,0,0,0,3,14,1,14, - 1,2,1,16,1,12,1,8,1,122,36,95,90,105,112,73, - 109,112,111,114,116,82,101,115,111,117,114,99,101,82,101,97, - 
100,101,114,46,105,115,95,114,101,115,111,117,114,99,101,99, - 1,0,0,0,0,0,0,0,0,0,0,0,9,0,0,0, - 9,0,0,0,99,0,0,0,115,184,0,0,0,100,1,100, - 2,108,0,109,1,125,1,1,0,124,1,124,0,106,2,160, - 3,124,0,106,4,161,1,131,1,125,2,124,2,160,5,124, - 0,106,2,106,6,161,1,125,3,124,3,106,7,100,3,107, - 2,115,58,74,0,130,1,124,3,106,8,125,4,116,9,131, - 0,125,5,124,0,106,2,106,10,68,0,93,100,125,6,122, - 18,124,1,124,6,131,1,160,5,124,4,161,1,125,7,87, - 0,110,22,4,0,116,11,121,122,1,0,1,0,1,0,89, - 0,113,78,89,0,110,2,48,0,124,7,106,8,106,7,125, - 8,116,12,124,8,131,1,100,1,107,2,114,154,124,7,106, - 7,86,0,1,0,113,78,124,8,124,5,118,1,114,78,124, - 5,160,13,124,8,161,1,1,0,124,8,86,0,1,0,113, - 78,100,0,83,0,41,4,78,114,0,0,0,0,41,1,218, - 4,80,97,116,104,114,60,0,0,0,41,14,90,7,112,97, - 116,104,108,105,98,114,183,0,0,0,114,4,0,0,0,114, - 56,0,0,0,114,38,0,0,0,90,11,114,101,108,97,116, - 105,118,101,95,116,111,114,29,0,0,0,114,59,0,0,0, - 90,6,112,97,114,101,110,116,218,3,115,101,116,114,28,0, - 0,0,114,23,0,0,0,114,51,0,0,0,218,3,97,100, - 100,41,9,114,32,0,0,0,114,183,0,0,0,90,13,102, - 117,108,108,110,97,109,101,95,112,97,116,104,90,13,114,101, - 108,97,116,105,118,101,95,112,97,116,104,90,12,112,97,99, - 107,97,103,101,95,112,97,116,104,90,12,115,117,98,100,105, - 114,115,95,115,101,101,110,218,8,102,105,108,101,110,97,109, - 101,90,8,114,101,108,97,116,105,118,101,90,11,112,97,114, - 101,110,116,95,110,97,109,101,114,9,0,0,0,114,9,0, - 0,0,114,10,0,0,0,218,8,99,111,110,116,101,110,116, - 115,250,2,0,0,115,34,0,0,0,0,8,12,1,18,1, - 14,3,14,1,6,1,6,1,12,1,2,1,18,1,12,1, - 10,5,8,1,12,1,10,1,8,1,10,1,122,33,95,90, - 105,112,73,109,112,111,114,116,82,101,115,111,117,114,99,101, - 82,101,97,100,101,114,46,99,111,110,116,101,110,116,115,78, - 41,10,114,6,0,0,0,114,7,0,0,0,114,8,0,0, - 0,114,84,0,0,0,114,81,0,0,0,114,34,0,0,0, - 114,180,0,0,0,114,181,0,0,0,114,182,0,0,0,114, - 187,0,0,0,114,9,0,0,0,114,9,0,0,0,114,9, - 0,0,0,114,10,0,0,0,114,80,0,0,0,212,2,0, - 0,115,14,0,0,0,8,1,4,5,4,2,8,4,8,9, - 8,6,8,11,114,80,0,0,0,41,45,114,84,0,0,0, - 90,26,95,102,114,111,122,101,110,95,105,109,112,111,114,116, - 108,105,98,95,101,120,116,101,114,110,97,108,114,21,0,0, - 0,114,1,0,0,0,114,2,0,0,0,90,17,95,102,114, - 111,122,101,110,95,105,109,112,111,114,116,108,105,98,114,76, - 0,0,0,114,148,0,0,0,114,110,0,0,0,114,152,0, - 0,0,114,67,0,0,0,114,131,0,0,0,90,7,95,95, - 97,108,108,95,95,114,20,0,0,0,90,15,112,97,116,104, - 95,115,101,112,97,114,97,116,111,114,115,114,18,0,0,0, - 114,75,0,0,0,114,3,0,0,0,114,25,0,0,0,218, - 4,116,121,112,101,114,70,0,0,0,114,113,0,0,0,114, - 115,0,0,0,114,117,0,0,0,114,4,0,0,0,114,89, - 0,0,0,114,36,0,0,0,114,37,0,0,0,114,35,0, - 0,0,114,27,0,0,0,114,122,0,0,0,114,142,0,0, - 0,114,144,0,0,0,114,52,0,0,0,114,147,0,0,0, - 114,155,0,0,0,218,8,95,95,99,111,100,101,95,95,114, - 153,0,0,0,114,159,0,0,0,114,161,0,0,0,114,169, - 0,0,0,114,151,0,0,0,114,149,0,0,0,114,44,0, - 0,0,114,80,0,0,0,114,9,0,0,0,114,9,0,0, - 0,114,9,0,0,0,114,10,0,0,0,218,8,60,109,111, - 100,117,108,101,62,1,0,0,0,115,88,0,0,0,4,16, - 8,1,16,1,8,1,8,1,8,1,8,1,8,1,8,2, - 8,3,6,1,14,3,16,4,4,2,8,2,4,1,4,1, - 4,2,14,127,0,127,0,1,12,1,12,1,2,1,2,252, - 4,9,8,4,8,9,8,31,8,126,2,254,2,29,4,5, - 8,21,8,46,8,10,8,46,10,5,8,7,8,6,8,13, - 8,19,8,15,8,26, + 218,8,60,109,111,100,117,108,101,62,1,0,0,0,115,84, + 0,0,0,4,16,8,1,16,1,8,1,8,1,8,1,8, + 1,8,1,8,2,8,3,6,1,14,3,16,4,4,2,8, + 2,4,1,4,1,4,2,14,127,0,125,12,1,12,1,2, + 1,2,252,4,9,8,4,8,9,8,31,8,126,2,254,2, + 29,4,5,8,21,8,46,8,10,8,46,10,5,8,7,8, + 6,8,13,8,19,8,15, 
}; From webhook-mailer at python.org Sun Jun 7 21:57:07 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 08 Jun 2020 01:57:07 -0000 Subject: [Python-checkins] bpo-40903: Handle multiple '=' in invalid assignment rules in the PEG parser (GH-20697) Message-ID: https://github.com/python/cpython/commit/9f495908c5bd3645ed1af82d7bae6782720dab77 commit: 9f495908c5bd3645ed1af82d7bae6782720dab77 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-07T18:57:00-07:00 summary: bpo-40903: Handle multiple '=' in invalid assignment rules in the PEG parser (GH-20697) Automerge-Triggered-By: @pablogsal files: A Misc/NEWS.d/next/Core and Builtins/2020-06-07-22-50-10.bpo-40903.7dWejS.rst M Grammar/python.gram M Lib/test/test_syntax.py M Parser/pegen/parse.c M Parser/pegen/pegen.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 19d9bb36fed5f..dd425eff30b7d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -92,7 +92,7 @@ assignment[stmt_ty]: | a=('(' b=single_target ')' { b } | single_subscript_attribute_target) ':' b=expression c=['=' d=annotated_rhs { d }] { CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } - | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) tc=[TYPE_COMMENT] { + | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) !'=' tc=[TYPE_COMMENT] { _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } | a=single_target b=augassign c=(yield_expr | star_expressions) { _Py_AugAssign(a, b->kind, c, EXTRA) } @@ -646,10 +646,11 @@ invalid_assignment: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } | a=expression ':' expression ['=' annotated_rhs] { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } - | a=star_expressions '=' (yield_expr | star_expressions) { + | (star_targets '=')* a=star_expressions '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( _PyPegen_get_invalid_target(a), "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) } + | (star_targets '=')* a=yield_expr '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "assignment to yield expression not possible") } | a=star_expressions augassign (yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( a, diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 4df5535b0053b..f41426a4e9d2d 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -63,6 +63,10 @@ Traceback (most recent call last): SyntaxError: cannot assign to function call +>>> yield = 1 +Traceback (most recent call last): +SyntaxError: assignment to yield expression not possible + >>> del f() Traceback (most recent call last): SyntaxError: cannot delete function call @@ -136,6 +140,18 @@ Traceback (most recent call last): SyntaxError: cannot assign to conditional expression +>>> True = True = 3 +Traceback (most recent call last): +SyntaxError: cannot assign to True + +>>> x = y = True = z = 3 +Traceback (most recent call last): +SyntaxError: cannot assign to True + +>>> x = y = yield = 1 +Traceback (most recent call last): +SyntaxError: assignment to yield expression not possible + >>> a, b += 1, 2 Traceback (most recent call last): SyntaxError: 'tuple' is an illegal expression for augmented assignment @@ -148,6 +164,10 @@ Traceback (most recent call last): SyntaxError: 'list' is an illegal expression for augmented assignment +>>> p = p = +Traceback (most recent call last): +SyntaxError: invalid syntax + From compiler_complex_args(): >>> def f(None=1): @@ 
-155,7 +175,6 @@ Traceback (most recent call last): SyntaxError: invalid syntax - From ast_for_arguments(): >>> def f(x, y=1, z): diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-07-22-50-10.bpo-40903.7dWejS.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-07-22-50-10.bpo-40903.7dWejS.rst new file mode 100644 index 0000000000000..5ee72c14ad352 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-07-22-50-10.bpo-40903.7dWejS.rst @@ -0,0 +1 @@ +Fixed a possible segfault in the new PEG parser when producing error messages for invalid assignments of the form :code:`p=p=`. Patch by Pablo Galindo \ No newline at end of file diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index b63924177d400..e5738e3e04afe 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -358,11 +358,11 @@ static KeywordToken *reserved_keywords[] = { #define _tmp_125_type 1280 #define _loop0_126_type 1281 #define _tmp_127_type 1282 -#define _tmp_128_type 1283 -#define _tmp_129_type 1284 +#define _loop0_128_type 1283 +#define _loop0_129_type 1284 #define _tmp_130_type 1285 -#define _loop0_131_type 1286 -#define _tmp_132_type 1287 +#define _tmp_131_type 1286 +#define _loop0_132_type 1287 #define _tmp_133_type 1288 #define _tmp_134_type 1289 #define _tmp_135_type 1290 @@ -376,9 +376,12 @@ static KeywordToken *reserved_keywords[] = { #define _tmp_143_type 1298 #define _tmp_144_type 1299 #define _tmp_145_type 1300 -#define _loop1_146_type 1301 +#define _tmp_146_type 1301 #define _tmp_147_type 1302 #define _tmp_148_type 1303 +#define _loop1_149_type 1304 +#define _tmp_150_type 1305 +#define _tmp_151_type 1306 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -663,11 +666,11 @@ static asdl_seq *_gather_123_rule(Parser *p); static void *_tmp_125_rule(Parser *p); static asdl_seq *_loop0_126_rule(Parser *p); static void *_tmp_127_rule(Parser *p); -static void *_tmp_128_rule(Parser *p); -static void *_tmp_129_rule(Parser *p); +static asdl_seq *_loop0_128_rule(Parser *p); +static asdl_seq *_loop0_129_rule(Parser *p); static void *_tmp_130_rule(Parser *p); -static asdl_seq *_loop0_131_rule(Parser *p); -static void *_tmp_132_rule(Parser *p); +static void *_tmp_131_rule(Parser *p); +static asdl_seq *_loop0_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); static void *_tmp_134_rule(Parser *p); static void *_tmp_135_rule(Parser *p); @@ -681,9 +684,12 @@ static void *_tmp_142_rule(Parser *p); static void *_tmp_143_rule(Parser *p); static void *_tmp_144_rule(Parser *p); static void *_tmp_145_rule(Parser *p); -static asdl_seq *_loop1_146_rule(Parser *p); +static void *_tmp_146_rule(Parser *p); static void *_tmp_147_rule(Parser *p); static void *_tmp_148_rule(Parser *p); +static asdl_seq *_loop1_149_rule(Parser *p); +static void *_tmp_150_rule(Parser *p); +static void *_tmp_151_rule(Parser *p); // file: statements? $ @@ -1998,7 +2004,7 @@ compound_stmt_rule(Parser *p) // assignment: // | NAME ':' expression ['=' annotated_rhs] // | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] -// | ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? +// | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? // | single_target augassign (yield_expr | star_expressions) // | invalid_assignment static stmt_ty @@ -2104,12 +2110,12 @@ assignment_rule(Parser *p) D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); } - { // ((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT? + { // ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); asdl_seq * a; void *b; void *tc; @@ -2118,10 +2124,12 @@ assignment_rule(Parser *p) && (b = _tmp_23_rule(p)) // yield_expr | star_expressions && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' + && (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? ) { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -2141,7 +2149,7 @@ assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) TYPE_COMMENT?")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); } { // single_target augassign (yield_expr | star_expressions) if (p->error_indicator) { @@ -14568,7 +14576,8 @@ invalid_named_expression_rule(Parser *p) // | tuple ':' // | star_named_expression ',' star_named_expressions* ':' // | expression ':' expression ['=' annotated_rhs] -// | star_expressions '=' (yield_expr | star_expressions) +// | ((star_targets '='))* star_expressions '=' +// | ((star_targets '='))* yield_expr '=' // | star_expressions augassign (yield_expr | star_expressions) static void * invalid_assignment_rule(Parser *p) @@ -14701,24 +14710,24 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); } - { // star_expressions '=' (yield_expr | star_expressions) + { // ((star_targets '='))* star_expressions '=' if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - void *_tmp_128_var; + asdl_seq * _loop0_128_var; expr_ty a; if ( + (_loop0_128_var = _loop0_128_rule(p)) // ((star_targets '='))* + && (a = star_expressions_rule(p)) // star_expressions && (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (_tmp_128_var = _tmp_128_rule(p)) // yield_expr | star_expressions ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -14729,7 +14738,37 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions '=' (yield_expr | star_expressions)")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))* star_expressions '='")); + } + { // ((star_targets '='))* yield_expr '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); + Token * _literal; + asdl_seq * _loop0_129_var; + expr_ty a; + if ( + (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* + && + (a = yield_expr_rule(p)) // yield_expr + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "assignment to yield expression not possible" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))* yield_expr '='")); } { // star_expressions augassign (yield_expr | star_expressions) if (p->error_indicator) { @@ -14737,7 +14776,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_129_var; + void *_tmp_130_var; expr_ty a; AugOperator* augassign_var; if ( @@ -14745,7 +14784,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_129_var = _tmp_129_rule(p)) // yield_expr | star_expressions + (_tmp_130_var = _tmp_130_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -14827,11 +14866,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_130_var; + void *_tmp_131_var; expr_ty a; asdl_seq* for_if_clauses_var; if ( - (_tmp_130_var = _tmp_130_rule(p)) // '[' | '(' | '{' + (_tmp_131_var = _tmp_131_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -14928,13 +14967,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - asdl_seq * _loop0_131_var; - void *_tmp_132_var; + asdl_seq * _loop0_132_var; + void *_tmp_133_var; arg_ty param_no_default_var; if ( - (_loop0_131_var = _loop0_131_rule(p)) // param_no_default* + (_loop0_132_var = _loop0_132_rule(p)) // param_no_default* && - (_tmp_132_var = _tmp_132_rule(p)) // slash_with_default | param_with_default+ + (_tmp_133_var = _tmp_133_rule(p)) // slash_with_default | param_with_default+ && (param_no_default_var = param_no_default_rule(p)) // param_no_default ) @@ -14976,11 +15015,11 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); Token * _literal; - void *_tmp_133_var; + void *_tmp_134_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_133_var = _tmp_133_rule(p)) // ')' | ',' (')' | '**') + (_tmp_134_var = _tmp_134_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -15050,11 +15089,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_134_var; + void *_tmp_135_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_134_var = _tmp_134_rule(p)) // ':' | ',' (':' | '**') + (_tmp_135_var = _tmp_135_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -16464,12 +16503,12 @@ _loop1_22_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_135_var; + void *_tmp_136_var; while ( - (_tmp_135_var = _tmp_135_rule(p)) // star_targets 
'=' + (_tmp_136_var = _tmp_136_rule(p)) // star_targets '=' ) { - _res = _tmp_135_var; + _res = _tmp_136_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16917,12 +16956,12 @@ _loop0_30_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_136_var; + void *_tmp_137_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) // '.' | '...' + (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' ) { - _res = _tmp_136_var; + _res = _tmp_137_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16983,12 +17022,12 @@ _loop1_31_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_137_var; + void *_tmp_138_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' + (_tmp_138_var = _tmp_138_rule(p)) // '.' | '...' ) { - _res = _tmp_137_var; + _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19115,12 +19154,12 @@ _loop1_67_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_138_var; + void *_tmp_139_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) // '@' named_expression NEWLINE + (_tmp_139_var = _tmp_139_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_138_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19347,12 +19386,12 @@ _loop1_71_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_139_var; + void *_tmp_140_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // ',' star_expression + (_tmp_140_var = _tmp_140_rule(p)) // ',' star_expression ) { - _res = _tmp_139_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19532,12 +19571,12 @@ _loop1_74_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_140_var; + void *_tmp_141_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // ',' expression + (_tmp_141_var = _tmp_141_rule(p)) // ',' expression ) { - _res = _tmp_140_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20562,12 +20601,12 @@ _loop1_89_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_141_var; + void *_tmp_142_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // 'or' conjunction + (_tmp_142_var = _tmp_142_rule(p)) // 'or' conjunction ) { - _res = _tmp_141_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20633,12 +20672,12 @@ _loop1_90_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, 
p->mark, "('and' inversion)")); - void *_tmp_142_var; + void *_tmp_143_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // 'and' inversion + (_tmp_143_var = _tmp_143_rule(p)) // 'and' inversion ) { - _res = _tmp_142_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21554,12 +21593,12 @@ _loop0_105_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_143_var; + void *_tmp_144_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // 'if' disjunction + (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction ) { - _res = _tmp_143_var; + _res = _tmp_144_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21620,12 +21659,12 @@ _loop0_106_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_144_var; + void *_tmp_145_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction + (_tmp_145_var = _tmp_145_rule(p)) // 'if' disjunction ) { - _res = _tmp_144_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22230,12 +22269,12 @@ _loop0_117_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_145_var; + void *_tmp_146_var; while ( - (_tmp_145_var = _tmp_145_rule(p)) // ',' star_target + (_tmp_146_var = _tmp_146_rule(p)) // ',' star_target ) { - _res = _tmp_145_var; + _res = _tmp_146_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22812,64 +22851,141 @@ _tmp_127_rule(Parser *p) return _res; } -// _tmp_128: yield_expr | star_expressions -static void * -_tmp_128_rule(Parser *p) +// _loop0_128: (star_targets '=') +static asdl_seq * +_loop0_128_rule(Parser *p) { D(p->level++); if (p->error_indicator) { D(p->level--); return NULL; } - void * _res = NULL; + void *_res = NULL; int _mark = p->mark; - { // yield_expr + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (star_targets '=') if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr + D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_147_var; + while ( + (_tmp_147_var = _tmp_147_rule(p)) // star_targets '=' ) { - D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; + _res = _tmp_147_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; 
} p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } - { // star_expressions + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_128_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_129: (star_targets '=') +static asdl_seq * +_loop0_129_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (star_targets '=') if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions + D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_148_var; + while ( + (_tmp_148_var = _tmp_148_rule(p)) // star_targets '=' ) { - D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; + _res = _tmp_148_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } - _res = NULL; - done: + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); D(p->level--); - return _res; + return _seq; } -// _tmp_129: yield_expr | star_expressions +// _tmp_130: yield_expr | star_expressions static void * -_tmp_129_rule(Parser *p) +_tmp_130_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22883,18 +22999,18 @@ _tmp_129_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -22902,18 +23018,18 @@ _tmp_129_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_129[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_129[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -22922,9 +23038,9 @@ _tmp_129_rule(Parser *p) return _res; } -// _tmp_130: '[' | '(' | '{' +// _tmp_131: '[' | '(' | '{' static void * -_tmp_130_rule(Parser *p) +_tmp_131_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22938,18 +23054,18 @@ _tmp_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'['")); } { // '(' @@ -22957,18 +23073,18 @@ _tmp_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); } { // '{' @@ -22976,18 +23092,18 @@ _tmp_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -22996,9 +23112,9 @@ _tmp_130_rule(Parser *p) return _res; } -// _loop0_131: param_no_default +// _loop0_132: param_no_default static asdl_seq * -_loop0_131_rule(Parser *p) +_loop0_132_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23022,7 +23138,7 @@ _loop0_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -23044,7 +23160,7 @@ _loop0_131_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23057,14 +23173,14 @@ _loop0_131_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_131_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_132_type, _seq); D(p->level--); return _seq; } -// _tmp_132: slash_with_default | param_with_default+ +// _tmp_133: slash_with_default | param_with_default+ static void * -_tmp_132_rule(Parser *p) +_tmp_133_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23078,18 +23194,18 @@ _tmp_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); } { // param_with_default+ @@ -23097,18 +23213,18 @@ _tmp_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_146_var; + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + asdl_seq * _loop1_149_var; if ( - (_loop1_146_var = _loop1_146_rule(p)) // param_with_default+ + (_loop1_149_var = _loop1_149_rule(p)) // param_with_default+ ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_146_var; + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + _res = _loop1_149_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default+")); } _res = NULL; @@ -23117,9 +23233,9 @@ _tmp_132_rule(Parser *p) return _res; } -// _tmp_133: ')' | ',' (')' | '**') +// _tmp_134: ')' | ',' (')' | '**') static void * -_tmp_133_rule(Parser *p) +_tmp_134_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23133,18 +23249,18 @@ _tmp_133_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -23152,21 +23268,21 @@ _tmp_133_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_147_var; + void *_tmp_150_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_147_var = _tmp_147_rule(p)) // ')' | '**' + (_tmp_150_var = _tmp_150_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_147_var); + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_150_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -23175,9 +23291,9 @@ _tmp_133_rule(Parser *p) return _res; } -// _tmp_134: ':' | ',' (':' | '**') +// _tmp_135: ':' | ',' (':' | '**') static void * -_tmp_134_rule(Parser *p) +_tmp_135_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23191,18 +23307,18 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -23210,21 +23326,21 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_148_var; + void *_tmp_151_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_148_var = _tmp_148_rule(p)) // ':' | '**' + (_tmp_151_var = _tmp_151_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_148_var); + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_151_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -23233,9 +23349,9 @@ _tmp_134_rule(Parser *p) return _res; } -// _tmp_135: star_targets '=' +// _tmp_136: star_targets '=' static void * -_tmp_135_rule(Parser *p) +_tmp_136_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23249,7 +23365,7 @@ _tmp_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -23258,7 +23374,7 @@ _tmp_135_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23268,7 +23384,7 @@ _tmp_135_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23277,9 +23393,9 @@ _tmp_135_rule(Parser *p) return _res; } -// _tmp_136: '.' | '...' +// _tmp_137: '.' | '...' static void * -_tmp_136_rule(Parser *p) +_tmp_137_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23293,18 +23409,18 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' 
@@ -23312,18 +23428,18 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23332,9 +23448,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: '.' | '...' +// _tmp_138: '.' | '...' static void * -_tmp_137_rule(Parser *p) +_tmp_138_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23348,18 +23464,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -23367,18 +23483,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23387,9 +23503,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: '@' named_expression NEWLINE +// _tmp_139: '@' named_expression NEWLINE static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23403,7 +23519,7 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -23415,7 +23531,7 @@ _tmp_138_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23425,7 +23541,7 @@ _tmp_138_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -23434,9 +23550,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: ',' star_expression +// _tmp_140: ',' star_expression static void * -_tmp_139_rule(Parser *p) +_tmp_140_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23450,7 +23566,7 @@ _tmp_139_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -23459,7 +23575,7 @@ _tmp_139_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23469,7 +23585,7 @@ _tmp_139_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -23478,9 +23594,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: ',' expression +// _tmp_141: ',' expression static void * -_tmp_140_rule(Parser *p) +_tmp_141_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23494,7 +23610,7 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -23503,7 +23619,7 @@ _tmp_140_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23513,7 +23629,7 @@ _tmp_140_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -23522,9 +23638,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: 'or' conjunction +// _tmp_142: 'or' conjunction static void * -_tmp_141_rule(Parser *p) +_tmp_142_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23538,7 +23654,7 @@ _tmp_141_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -23547,7 +23663,7 @@ _tmp_141_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23557,7 +23673,7 @@ _tmp_141_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -23566,9 +23682,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: 'and' inversion +// _tmp_143: 'and' inversion static void * -_tmp_142_rule(Parser *p) +_tmp_143_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23582,7 +23698,7 @@ _tmp_142_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -23591,7 +23707,7 @@ _tmp_142_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23601,7 +23717,7 @@ _tmp_142_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -23610,9 +23726,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: 'if' disjunction +// _tmp_144: 'if' disjunction static void * -_tmp_143_rule(Parser *p) +_tmp_144_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23626,7 +23742,7 @@ _tmp_143_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23635,7 +23751,7 @@ _tmp_143_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23645,7 +23761,7 @@ _tmp_143_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -23654,9 +23770,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: 'if' disjunction +// _tmp_145: 'if' disjunction static void * -_tmp_144_rule(Parser *p) +_tmp_145_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23670,7 +23786,7 @@ _tmp_144_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23679,7 +23795,7 @@ _tmp_144_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23689,7 +23805,7 @@ _tmp_144_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -23698,9 +23814,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: ',' star_target +// _tmp_146: ',' star_target static void * -_tmp_145_rule(Parser *p) +_tmp_146_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23714,7 +23830,7 @@ _tmp_145_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -23723,7 +23839,7 @@ _tmp_145_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23733,7 +23849,7 @@ _tmp_145_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -23742,9 +23858,87 @@ _tmp_145_rule(Parser *p) return _res; } -// _loop1_146: param_with_default +// _tmp_147: star_targets '=' +static void * +_tmp_147_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_targets '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + Token * _literal; + expr_ty star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + _res = _PyPegen_dummy_name(p, star_targets_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_148: star_targets '=' +static void * +_tmp_148_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_targets '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + Token * _literal; + expr_ty star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + _res = _PyPegen_dummy_name(p, star_targets_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_149: param_with_default static asdl_seq * -_loop1_146_rule(Parser *p) +_loop1_149_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23768,7 +23962,7 @@ _loop1_146_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -23790,7 +23984,7 @@ _loop1_146_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -23808,14 +24002,14 @@ _loop1_146_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_146_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_149_type, _seq); D(p->level--); return _seq; } -// _tmp_147: ')' | '**' +// _tmp_150: ')' | '**' static void * -_tmp_147_rule(Parser *p) +_tmp_150_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23829,18 +24023,18 @@ _tmp_147_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -23848,18 +24042,18 @@ _tmp_147_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -23868,9 +24062,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: ':' | '**' +// _tmp_151: ':' | '**' static void * -_tmp_148_rule(Parser *p) +_tmp_151_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23884,18 +24078,18 @@ _tmp_148_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // '**' @@ -23903,18 +24097,18 @@ _tmp_148_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; diff --git a/Parser/pegen/pegen.c b/Parser/pegen/pegen.c index afe75d7f862ee..7b581cadfb64a 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen/pegen.c @@ -161,6 +161,7 @@ byte_offset_to_character_offset(PyObject *line, int col_offset) const char * _PyPegen_get_expr_name(expr_ty e) { + assert(e != NULL); switch (e->kind) { case Attribute_kind: return "attribute"; From webhook-mailer at python.org Sun Jun 7 22:24:42 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 08 Jun 2020 02:24:42 -0000 Subject: [Python-checkins] bpo-40741: Update macOS installer to use SQLite 3.32.2. (GH-20705) Message-ID: https://github.com/python/cpython/commit/37eed5a9ee7c802e7151ee9939ed604032886639 commit: 37eed5a9ee7c802e7151ee9939ed604032886639 branch: master author: Ned Deily committer: GitHub date: 2020-06-07T22:24:33-04:00 summary: bpo-40741: Update macOS installer to use SQLite 3.32.2. (GH-20705) files: A Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 76553c93a4957..86a09ae5254a3 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -313,9 +313,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.31.1", - url="https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz", - checksum='2d0a553534c521504e3ac3ad3b90f125', + name="SQLite 3.32.2", + url="https://sqlite.org/2020/sqlite-autoconf-3320200.tar.gz", + checksum='eb498918a33159cdf8104997aad29e83', extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS5 ' '-DSQLITE_ENABLE_FTS4 ' diff --git a/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst new file mode 100644 index 0000000000000..6ff7b9a805b95 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst @@ -0,0 +1 @@ +Update macOS installer to use SQLite 3.32.2. From webhook-mailer at python.org Sun Jun 7 22:42:25 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 02:42:25 -0000 Subject: [Python-checkins] bpo-40741: Update macOS installer to use SQLite 3.32.2. (GH-20705) Message-ID: https://github.com/python/cpython/commit/d1c449a5c6e1cd1f245ec8b721c0f32675d63872 commit: d1c449a5c6e1cd1f245ec8b721c0f32675d63872 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-07T19:42:16-07:00 summary: bpo-40741: Update macOS installer to use SQLite 3.32.2. 
(GH-20705) (cherry picked from commit 37eed5a9ee7c802e7151ee9939ed604032886639) Co-authored-by: Ned Deily files: A Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 2b48cdfb860f7..8bde04ee04630 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -313,9 +313,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.31.1", - url="https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz", - checksum='2d0a553534c521504e3ac3ad3b90f125', + name="SQLite 3.32.2", + url="https://sqlite.org/2020/sqlite-autoconf-3320200.tar.gz", + checksum='eb498918a33159cdf8104997aad29e83', extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS5 ' '-DSQLITE_ENABLE_FTS4 ' diff --git a/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst new file mode 100644 index 0000000000000..6ff7b9a805b95 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst @@ -0,0 +1 @@ +Update macOS installer to use SQLite 3.32.2. From webhook-mailer at python.org Sun Jun 7 22:43:39 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 02:43:39 -0000 Subject: [Python-checkins] bpo-40741: Update macOS installer to use SQLite 3.32.2. (GH-20705) Message-ID: https://github.com/python/cpython/commit/264e4fd9619dfab3d9de7f78a46efd8772b03ea6 commit: 264e4fd9619dfab3d9de7f78a46efd8772b03ea6 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-07T19:43:35-07:00 summary: bpo-40741: Update macOS installer to use SQLite 3.32.2. (GH-20705) (cherry picked from commit 37eed5a9ee7c802e7151ee9939ed604032886639) Co-authored-by: Ned Deily files: A Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index bdfa6f4adf612..9d3e2a785a6c6 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -313,9 +313,9 @@ def library_recipes(): ), ), dict( - name="SQLite 3.31.1", - url="https://sqlite.org/2020/sqlite-autoconf-3310100.tar.gz", - checksum='2d0a553534c521504e3ac3ad3b90f125', + name="SQLite 3.32.2", + url="https://sqlite.org/2020/sqlite-autoconf-3320200.tar.gz", + checksum='eb498918a33159cdf8104997aad29e83', extra_cflags=('-Os ' '-DSQLITE_ENABLE_FTS5 ' '-DSQLITE_ENABLE_FTS4 ' diff --git a/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst new file mode 100644 index 0000000000000..6ff7b9a805b95 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst @@ -0,0 +1 @@ +Update macOS installer to use SQLite 3.32.2. 
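[Editor's note: the SQLite recipe bumps above change only the `url` and the `checksum` field of a build-installer recipe entry. The checksums are 32-hex-digit MD5 digests. Below is a minimal, illustrative sketch of how such a recipe checksum can be verified against a locally downloaded tarball; the file path, the `recipe` dict and the `md5_of` helper are assumptions for the example and are not the installer script's actual helpers.]

    # Sketch: verify a downloaded source tarball against the MD5 checksum
    # recorded in a build-installer-style recipe entry.
    import hashlib

    recipe = {
        "name": "SQLite 3.32.2",
        "url": "https://sqlite.org/2020/sqlite-autoconf-3320200.tar.gz",
        "checksum": "eb498918a33159cdf8104997aad29e83",  # MD5, as in the diff above
    }

    def md5_of(path, chunk_size=1 << 20):
        """Return the hex MD5 digest of a file, read in chunks to bound memory use."""
        digest = hashlib.md5()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    if __name__ == "__main__":
        path = "sqlite-autoconf-3320200.tar.gz"  # assumed local download of recipe["url"]
        ok = md5_of(path) == recipe["checksum"]
        print(f"{recipe['name']}: checksum {'OK' if ok else 'MISMATCH'}")
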
From webhook-mailer at python.org Mon Jun 8 01:01:29 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Mon, 08 Jun 2020 05:01:29 -0000 Subject: [Python-checkins] bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Message-ID: https://github.com/python/cpython/commit/7633371dace67aaa21eb4b86f889441571ec4167 commit: 7633371dace67aaa21eb4b86f889441571ec4167 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-07T22:01:21-07:00 summary: bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Also added an example in shutil in order to make more clear how they are to be used. Initially reported by Weinan Li on bpo. files: M Doc/library/shutil.rst diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index c7c63e6f80844..1b094aeb9ca3d 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -570,12 +570,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. available), or "xztar" (if the :mod:`lzma` module is available). *root_dir* is a directory that will be the root directory of the - archive; for example, we typically chdir into *root_dir* before creating the - archive. + archive, all paths in the archive will be relative to it; for example, + we typically chdir into *root_dir* before creating the archive. *base_dir* is the directory where we start archiving from; i.e. *base_dir* will be the common prefix of all files and - directories in the archive. + directories in the archive. *base_dir* must be given relative + to *root_dir*. See :ref:`shutil-archiving-example-with-basedir` for how to + use *base_dir* and *root_dir* together. *root_dir* and *base_dir* both default to the current directory. @@ -727,6 +729,48 @@ The resulting archive contains: -rw-r--r-- tarek/staff 37192 2010-02-06 18:23:10 ./known_hosts +.. _shutil-archiving-example-with-basedir: + +Archiving example with *base_dir* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In this example, similar to the `one above `_, +we show how to use :func:`make_archive`, but this time with the usage of +*base_dir*. We now have the following directory structure: + +.. code-block:: shell-session + + $ tree tmp + tmp + ??? root + ??? structure + ??? content + ??? please_add.txt + ??? do_not_add.txt + +In the final archive, :file:`please_add.txt` should be included, but +:file:`do_not_add.txt` should not. Therefore we use the following:: + + >>> from shutil import make_archive + >>> import os + >>> archive_name = os.path.expanduser(os.path.join('~', 'myarchive')) + >>> make_archive( + ... archive_name, + ... 'tar', + ... root_dir='tmp/root', + ... base_dir='structure/content', + ... ) + '/Users/tarek/my_archive.tar' + +Listing the files in the resulting archive gives us: + +.. 
code-block:: shell-session + + $ python -m tarfile -l /Users/tarek/myarchive.tar + structure/content/ + structure/content/please_add.txt + + Querying the size of the output terminal ---------------------------------------- From webhook-mailer at python.org Mon Jun 8 01:07:14 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 05:07:14 -0000 Subject: [Python-checkins] bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Message-ID: https://github.com/python/cpython/commit/d5489a964fadc028c7086218702daf6fae087340 commit: d5489a964fadc028c7086218702daf6fae087340 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-07T22:07:06-07:00 summary: bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Also added an example in shutil in order to make more clear how they are to be used. Initially reported by Weinan Li on bpo. (cherry picked from commit 7633371dace67aaa21eb4b86f889441571ec4167) Co-authored-by: Lysandros Nikolaou files: M Doc/library/shutil.rst diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index 12e69a4ea040c..799505858f7f5 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -477,12 +477,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. available), or "xztar" (if the :mod:`lzma` module is available). *root_dir* is a directory that will be the root directory of the - archive; for example, we typically chdir into *root_dir* before creating the - archive. + archive, all paths in the archive will be relative to it; for example, + we typically chdir into *root_dir* before creating the archive. *base_dir* is the directory where we start archiving from; i.e. *base_dir* will be the common prefix of all files and - directories in the archive. + directories in the archive. *base_dir* must be given relative + to *root_dir*. See :ref:`shutil-archiving-example-with-basedir` for how to + use *base_dir* and *root_dir* together. *root_dir* and *base_dir* both default to the current directory. @@ -626,6 +628,48 @@ The resulting archive contains: -rw-r--r-- tarek/staff 37192 2010-02-06 18:23:10 ./known_hosts +.. _shutil-archiving-example-with-basedir: + +Archiving example with *base_dir* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In this example, similar to the `one above `_, +we show how to use :func:`make_archive`, but this time with the usage of +*base_dir*. We now have the following directory structure: + +.. code-block:: shell-session + + $ tree tmp + tmp + ??? root + ??? structure + ??? content + ??? please_add.txt + ??? do_not_add.txt + +In the final archive, :file:`please_add.txt` should be included, but +:file:`do_not_add.txt` should not. Therefore we use the following:: + + >>> from shutil import make_archive + >>> import os + >>> archive_name = os.path.expanduser(os.path.join('~', 'myarchive')) + >>> make_archive( + ... archive_name, + ... 'tar', + ... root_dir='tmp/root', + ... base_dir='structure/content', + ... ) + '/Users/tarek/my_archive.tar' + +Listing the files in the resulting archive gives us: + +.. 
code-block:: shell-session + + $ python -m tarfile -l /Users/tarek/myarchive.tar + structure/content/ + structure/content/please_add.txt + + Querying the size of the output terminal ---------------------------------------- From webhook-mailer at python.org Mon Jun 8 01:08:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 05:08:56 -0000 Subject: [Python-checkins] bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Message-ID: https://github.com/python/cpython/commit/12dfbae2ec30e7c90499129b17b6049bfd9bb2b6 commit: 12dfbae2ec30e7c90499129b17b6049bfd9bb2b6 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-07T22:08:51-07:00 summary: bpo-22021: Update root_dir and base_dir documentation in shutil (GH-10367) Also added an example in shutil in order to make more clear how they are to be used. Initially reported by Weinan Li on bpo. (cherry picked from commit 7633371dace67aaa21eb4b86f889441571ec4167) Co-authored-by: Lysandros Nikolaou files: M Doc/library/shutil.rst diff --git a/Doc/library/shutil.rst b/Doc/library/shutil.rst index bd24de7202321..25b749e57b27d 100644 --- a/Doc/library/shutil.rst +++ b/Doc/library/shutil.rst @@ -567,12 +567,14 @@ provided. They rely on the :mod:`zipfile` and :mod:`tarfile` modules. available), or "xztar" (if the :mod:`lzma` module is available). *root_dir* is a directory that will be the root directory of the - archive; for example, we typically chdir into *root_dir* before creating the - archive. + archive, all paths in the archive will be relative to it; for example, + we typically chdir into *root_dir* before creating the archive. *base_dir* is the directory where we start archiving from; i.e. *base_dir* will be the common prefix of all files and - directories in the archive. + directories in the archive. *base_dir* must be given relative + to *root_dir*. See :ref:`shutil-archiving-example-with-basedir` for how to + use *base_dir* and *root_dir* together. *root_dir* and *base_dir* both default to the current directory. @@ -724,6 +726,48 @@ The resulting archive contains: -rw-r--r-- tarek/staff 37192 2010-02-06 18:23:10 ./known_hosts +.. _shutil-archiving-example-with-basedir: + +Archiving example with *base_dir* +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +In this example, similar to the `one above `_, +we show how to use :func:`make_archive`, but this time with the usage of +*base_dir*. We now have the following directory structure: + +.. code-block:: shell-session + + $ tree tmp + tmp + ??? root + ??? structure + ??? content + ??? please_add.txt + ??? do_not_add.txt + +In the final archive, :file:`please_add.txt` should be included, but +:file:`do_not_add.txt` should not. Therefore we use the following:: + + >>> from shutil import make_archive + >>> import os + >>> archive_name = os.path.expanduser(os.path.join('~', 'myarchive')) + >>> make_archive( + ... archive_name, + ... 'tar', + ... root_dir='tmp/root', + ... base_dir='structure/content', + ... ) + '/Users/tarek/my_archive.tar' + +Listing the files in the resulting archive gives us: + +.. 
code-block:: shell-session + + $ python -m tarfile -l /Users/tarek/myarchive.tar + structure/content/ + structure/content/please_add.txt + + Querying the size of the output terminal ---------------------------------------- From webhook-mailer at python.org Mon Jun 8 02:51:48 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Mon, 08 Jun 2020 06:51:48 -0000 Subject: [Python-checkins] Deny eval() direct access to builtins (GH-20713) Message-ID: https://github.com/python/cpython/commit/3ff51d425ecd98b7ba5a12ca9f77eda73fbf9f53 commit: 3ff51d425ecd98b7ba5a12ca9f77eda73fbf9f53 branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-07T23:51:40-07:00 summary: Deny eval() direct access to builtins (GH-20713) files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 03393f35b11c5..1e3b54ccf9cc9 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -407,7 +407,8 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non # Create all the named tuple methods to be added to the class namespace s = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' - namespace = {'_tuple_new': tuple_new, '__name__': f'namedtuple_{typename}'} + namespace = {'_tuple_new': tuple_new, '__builtins__': None, + '__name__': f'namedtuple_{typename}'} __new__ = eval(s, namespace) __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: From webhook-mailer at python.org Mon Jun 8 03:52:51 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 08 Jun 2020 07:52:51 -0000 Subject: [Python-checkins] allow macOS installer builds to package pre-built html docs (GH-20715) Message-ID: https://github.com/python/cpython/commit/63fc55b2eab0331465605a49bfd28a1bcb997f92 commit: 63fc55b2eab0331465605a49bfd28a1bcb997f92 branch: master author: Ned Deily committer: GitHub date: 2020-06-08T03:52:43-04:00 summary: allow macOS installer builds to package pre-built html docs (GH-20715) build-installer now looks in its directory of source tarballs for a suitable html tarball of the same version. If so, it will unpack and use it rather than rebuilding the html format documentation set from the source repo. This is intended as a speedup for test builds of the installer. Files names must be in the same format as produced by the docs build for download, for example, `python-3.9.0b1-docs-html.tar.bz2`. files: M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 86a09ae5254a3..a2cba3210211d 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -1066,14 +1066,40 @@ def buildPythonDocs(): curDir = os.getcwd() os.chdir(buildDir) runCommand('make clean') - # Create virtual environment for docs builds with blurb and sphinx - runCommand('make venv') - runCommand('venv/bin/python3 -m pip install -U Sphinx==2.2.0') - runCommand('make html PYTHON=venv/bin/python') + + # Search third-party source directory for a pre-built version of the docs. 
+ # Use the naming convention of the docs.python.org html downloads: + # python-3.9.0b1-docs-html.tar.bz2 + doctarfiles = [ f for f in os.listdir(DEPSRC) + if f.startswith('python-'+getFullVersion()) + if f.endswith('-docs-html.tar.bz2') ] + if doctarfiles: + doctarfile = doctarfiles[0] + if not os.path.exists('build'): + os.mkdir('build') + # if build directory existed, it was emptied by make clean, above + os.chdir('build') + # Extract the first archive found for this version into build + runCommand('tar xjf %s'%shellQuote(os.path.join(DEPSRC, doctarfile))) + # see if tar extracted a directory ending in -docs-html + archivefiles = [ f for f in os.listdir('.') + if f.endswith('-docs-html') + if os.path.isdir(f) ] + if archivefiles: + archivefile = archivefiles[0] + # make it our 'Docs/build/html' directory + print(' -- using pre-built python documentation from %s'%archivefile) + os.rename(archivefile, 'html') + os.chdir(buildDir) + + htmlDir = os.path.join('build', 'html') + if not os.path.exists(htmlDir): + # Create virtual environment for docs builds with blurb and sphinx + runCommand('make venv') + runCommand('venv/bin/python3 -m pip install -U Sphinx==2.2.0') + runCommand('make html PYTHON=venv/bin/python') + os.rename(htmlDir, docdir) os.chdir(curDir) - if not os.path.exists(docdir): - os.mkdir(docdir) - os.rename(os.path.join(buildDir, 'build', 'html'), docdir) def buildPython(): From webhook-mailer at python.org Mon Jun 8 10:11:53 2020 From: webhook-mailer at python.org (Shantanu) Date: Mon, 08 Jun 2020 14:11:53 -0000 Subject: [Python-checkins] bpo-33187: Document 3.9 changes to xml.etree.ElementInclude.include (GH-20438) Message-ID: https://github.com/python/cpython/commit/301f0d4ff9b6bd60599eea0612904f65a92e6dd9 commit: 301f0d4ff9b6bd60599eea0612904f65a92e6dd9 branch: master author: Shantanu committer: GitHub date: 2020-06-08T16:11:44+02:00 summary: bpo-33187: Document 3.9 changes to xml.etree.ElementInclude.include (GH-20438) Looks like the merging of bpo-33187 and bpo-20928 was racy, resulting in this change going undocumented. files: M Doc/library/xml.etree.elementtree.rst diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 658bc3a54f86e..2085a85927e46 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -816,16 +816,25 @@ Functions loader fails, it can return None or raise an exception. -.. function:: xml.etree.ElementInclude.include( elem, loader=None) +.. function:: xml.etree.ElementInclude.include( elem, loader=None, base_url=None, \ + max_depth=6) This function expands XInclude directives. *elem* is the root element. *loader* is an optional resource loader. If omitted, it defaults to :func:`default_loader`. If given, it should be a callable that implements the same interface as - :func:`default_loader`. Returns the expanded resource. If the parse mode is + :func:`default_loader`. *base_url* is base URL of the original file, to resolve + relative include file references. *max_depth* is the maximum number of recursive + inclusions. Limited to reduce the risk of malicious content explosion. Pass a + negative value to disable the limitation. + + Returns the expanded resource. If the parse mode is ``"xml"``, this is an ElementTree instance. If the parse mode is "text", this is a Unicode string. If the loader fails, it can return None or raise an exception. + .. versionadded:: 3.9 + The *base_url* and *max_depth* parameters. + .. 
_elementtree-element-objects: From webhook-mailer at python.org Mon Jun 8 10:30:38 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Jun 2020 14:30:38 -0000 Subject: [Python-checkins] bpo-29882: Add _Py_popcount32() function (GH-20518) Message-ID: https://github.com/python/cpython/commit/c6b292cdeee689f0bfac6c1e2c2d4e4e01fa8d9e commit: c6b292cdeee689f0bfac6c1e2c2d4e4e01fa8d9e branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T16:30:33+02:00 summary: bpo-29882: Add _Py_popcount32() function (GH-20518) * Rename pycore_byteswap.h to pycore_bitutils.h. * Move popcount_digit() to pycore_bitutils.h as _Py_popcount32(). * _Py_popcount32() uses GCC and clang builtin function if available. * Add unit tests to _Py_popcount32(). files: A Include/internal/pycore_bitutils.h D Include/internal/pycore_byteswap.h M Makefile.pre.in M Modules/_ctypes/cfield.c M Modules/_testinternalcapi.c M Modules/sha256module.c M Modules/sha512module.c M Objects/longobject.c M Objects/stringlib/codecs.h M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M Python/hamt.c diff --git a/Include/internal/pycore_byteswap.h b/Include/internal/pycore_bitutils.h similarity index 59% rename from Include/internal/pycore_byteswap.h rename to Include/internal/pycore_bitutils.h index 5e64704a004c8..36ffe23b9ff26 100644 --- a/Include/internal/pycore_byteswap.h +++ b/Include/internal/pycore_bitutils.h @@ -1,4 +1,6 @@ -/* Bytes swap functions, reverse order of bytes: +/* Bit and bytes utilities. + + Bytes swap functions, reverse order of bytes: - _Py_bswap16(uint16_t) - _Py_bswap32(uint32_t) @@ -82,6 +84,53 @@ _Py_bswap64(uint64_t word) } +// Population count: count the number of 1's in 'x' +// (number of bits set to 1), also known as the hamming weight. +// +// Implementation note. CPUID is not used, to test if x86 POPCNT instruction +// can be used, to keep the implementation simple. For example, Visual Studio +// __popcnt() is not used this reason. The clang and GCC builtin function can +// use the x86 POPCNT instruction if the target architecture has SSE4a or +// newer. +static inline int +_Py_popcount32(uint32_t x) +{ +#if (defined(__clang__) || defined(__GNUC__)) + +#if SIZEOF_INT >= 4 + Py_BUILD_ASSERT(sizeof(x) <= sizeof(unsigned int)); + return __builtin_popcount(x); +#else + // The C standard guarantees that unsigned long will always be big enough + // to hold a uint32_t value without losing information. + Py_BUILD_ASSERT(sizeof(x) <= sizeof(unsigned long)); + return __builtin_popcountl(x); +#endif + +#else + // 32-bit SWAR (SIMD Within A Register) popcount + + // Binary: 0 1 0 1 ... + const uint32_t M1 = 0x55555555; + // Binary: 00 11 00 11. .. + const uint32_t M2 = 0x33333333; + // Binary: 0000 1111 0000 1111 ... 
+ const uint32_t M4 = 0x0F0F0F0F; + // 256**4 + 256**3 + 256**2 + 256**1 + const uint32_t SUM = 0x01010101; + + // Put count of each 2 bits into those 2 bits + x = x - ((x >> 1) & M1); + // Put count of each 4 bits into those 4 bits + x = (x & M2) + ((x >> 2) & M2); + // Put count of each 8 bits into those 8 bits + x = (x + (x >> 4)) & M4; + // Sum of the 4 byte counts + return (uint32_t)((uint64_t)x * (uint64_t)SUM) >> 24; +#endif +} + + #ifdef __cplusplus } #endif diff --git a/Makefile.pre.in b/Makefile.pre.in index 5a18704e44198..b115e7fc01f74 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1121,7 +1121,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_abstract.h \ $(srcdir)/Include/internal/pycore_accu.h \ $(srcdir)/Include/internal/pycore_atomic.h \ - $(srcdir)/Include/internal/pycore_byteswap.h \ + $(srcdir)/Include/internal/pycore_bitutils.h \ $(srcdir)/Include/internal/pycore_bytes_methods.h \ $(srcdir)/Include/internal/pycore_call.h \ $(srcdir)/Include/internal/pycore_ceval.h \ diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c index 32a2beeb744f7..3a9b7119201cf 100644 --- a/Modules/_ctypes/cfield.c +++ b/Modules/_ctypes/cfield.c @@ -1,5 +1,5 @@ #include "Python.h" -#include "pycore_byteswap.h" // _Py_bswap32() +#include "pycore_bitutils.h" // _Py_bswap32() #include #ifdef MS_WIN32 diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 5f217dcb8978e..6d5af5917f1f0 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -12,7 +12,7 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" -#include "pycore_byteswap.h" // _Py_bswap32() +#include "pycore_bitutils.h" // _Py_bswap32() #include "pycore_initconfig.h" // _Py_GetConfigsAsDict() #include "pycore_hashtable.h" // _Py_hashtable_new() #include "pycore_gc.h" // PyGC_Head @@ -63,6 +63,45 @@ test_bswap(PyObject *self, PyObject *Py_UNUSED(args)) } +static int +check_popcount(uint32_t x, int expected) +{ + // Use volatile to prevent the compiler to optimize out the whole test + volatile uint32_t u = x; + int bits = _Py_popcount32(u); + if (bits != expected) { + PyErr_Format(PyExc_AssertionError, + "_Py_popcount32(%lu) returns %i, expected %i", + (unsigned long)x, bits, expected); + return -1; + } + return 0; +} + + +static PyObject* +test_popcount(PyObject *self, PyObject *Py_UNUSED(args)) +{ +#define CHECK(X, RESULT) \ + do { \ + if (check_popcount(X, RESULT) < 0) { \ + return NULL; \ + } \ + } while (0) + + CHECK(0, 0); + CHECK(1, 1); + CHECK(0x08080808, 4); + CHECK(0x10101010, 4); + CHECK(0x10204080, 4); + CHECK(0xDEADCAFE, 22); + CHECK(0xFFFFFFFF, 32); + Py_RETURN_NONE; + +#undef CHECK +} + + #define TO_PTR(ch) ((void*)(uintptr_t)ch) #define FROM_PTR(ptr) ((uintptr_t)ptr) #define VALUE(key) (1 + ((int)(key) - 'a')) @@ -157,6 +196,7 @@ static PyMethodDef TestMethods[] = { {"get_configs", get_configs, METH_NOARGS}, {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, {"test_bswap", test_bswap, METH_NOARGS}, + {"test_popcount", test_popcount, METH_NOARGS}, {"test_hashtable", test_hashtable, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/sha256module.c b/Modules/sha256module.c index 8edb1d5382883..261f9daee2807 100644 --- a/Modules/sha256module.c +++ b/Modules/sha256module.c @@ -17,7 +17,7 @@ /* SHA objects */ #include "Python.h" -#include "pycore_byteswap.h" // _Py_bswap32() +#include "pycore_bitutils.h" // _Py_bswap32() #include "structmember.h" // PyMemberDef #include "hashlib.h" #include "pystrhex.h" diff --git a/Modules/sha512module.c 
b/Modules/sha512module.c index 561ef8ef0e867..aa2aeedcc6c64 100644 --- a/Modules/sha512module.c +++ b/Modules/sha512module.c @@ -17,7 +17,7 @@ /* SHA objects */ #include "Python.h" -#include "pycore_byteswap.h" // _Py_bswap32() +#include "pycore_bitutils.h" // _Py_bswap32() #include "structmember.h" // PyMemberDef #include "hashlib.h" #include "pystrhex.h" diff --git a/Objects/longobject.c b/Objects/longobject.c index 0b209a403c4b7..ce10c4f66586a 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -3,8 +3,9 @@ /* XXX The functional organization of this file is terrible */ #include "Python.h" -#include "pycore_interp.h" // _PY_NSMALLPOSINTS -#include "pycore_pystate.h" // _Py_IsMainInterpreter() +#include "pycore_bitutils.h" // _Py_popcount32() +#include "pycore_interp.h" // _PY_NSMALLPOSINTS +#include "pycore_pystate.h" // _Py_IsMainInterpreter() #include "longintrepr.h" #include @@ -5307,12 +5308,10 @@ int_bit_length_impl(PyObject *self) static int popcount_digit(digit d) { - /* 32bit SWAR popcount. */ - uint32_t u = d; - u -= (u >> 1) & 0x55555555U; - u = (u & 0x33333333U) + ((u >> 2) & 0x33333333U); - u = (u + (u >> 4)) & 0x0f0f0f0fU; - return (uint32_t)(u * 0x01010101U) >> 24; + // digit can be larger than uint32_t, but only PyLong_SHIFT bits + // of it will be ever used. + Py_BUILD_ASSERT(PyLong_SHIFT <= 32); + return _Py_popcount32((uint32_t)d); } /*[clinic input] diff --git a/Objects/stringlib/codecs.h b/Objects/stringlib/codecs.h index 9b2a29ba3b8c2..197605b012e5c 100644 --- a/Objects/stringlib/codecs.h +++ b/Objects/stringlib/codecs.h @@ -4,7 +4,7 @@ # error "codecs.h is specific to Unicode" #endif -#include "pycore_byteswap.h" // _Py_bswap32() +#include "pycore_bitutils.h" // _Py_bswap32() /* Mask to quickly check whether a C 'long' contains a non-ASCII, UTF8-encoded char. */ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index b6b0cf3e991ba..8d5f99f8336a3 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -170,7 +170,7 @@ - + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 10dfffba6113e..7bc9f8f166456 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -201,7 +201,7 @@ Include - + Include diff --git a/Python/hamt.c b/Python/hamt.c index 8801c5ea418c7..e272e8808fd95 100644 --- a/Python/hamt.c +++ b/Python/hamt.c @@ -1,5 +1,6 @@ #include "Python.h" +#include "pycore_bitutils.h" // _Py_popcount32 #include "pycore_hamt.h" #include "pycore_object.h" // _PyObject_GC_TRACK() #include // offsetof() @@ -433,30 +434,10 @@ hamt_bitpos(int32_t hash, uint32_t shift) return (uint32_t)1 << hamt_mask(hash, shift); } -static inline uint32_t -hamt_bitcount(uint32_t i) -{ - /* We could use native popcount instruction but that would - require to either add configure flags to enable SSE4.2 - support or to detect it dynamically. Otherwise, we have - a risk of CPython not working properly on older hardware. - - In practice, there's no observable difference in - performance between using a popcount instruction or the - following fallback code. 
- - The algorithm is copied from: - https://graphics.stanford.edu/~seander/bithacks.html - */ - i = i - ((i >> 1) & 0x55555555); - i = (i & 0x33333333) + ((i >> 2) & 0x33333333); - return (((i + (i >> 4)) & 0xF0F0F0F) * 0x1010101) >> 24; -} - static inline uint32_t hamt_bitindex(uint32_t bitmap, uint32_t bit) { - return hamt_bitcount(bitmap & (bit - 1)); + return (uint32_t)_Py_popcount32(bitmap & (bit - 1)); } @@ -820,7 +801,7 @@ hamt_node_bitmap_assoc(PyHamtNode_Bitmap *self, else { /* There was no key before with the same (shift,hash). */ - uint32_t n = hamt_bitcount(self->b_bitmap); + uint32_t n = (uint32_t)_Py_popcount32(self->b_bitmap); if (n >= 16) { /* When we have a situation where we want to store more From webhook-mailer at python.org Mon Jun 8 11:28:21 2020 From: webhook-mailer at python.org (Sandro Mani) Date: Mon, 08 Jun 2020 15:28:21 -0000 Subject: [Python-checkins] bpo-40854: Allow overriding sys.platlibdir via PYTHONPLATLIBDIR env-var (GH-20605) Message-ID: https://github.com/python/cpython/commit/8f023a2f664f902a3d0b7a6f64d63afc0d1c15ae commit: 8f023a2f664f902a3d0b7a6f64d63afc0d1c15ae branch: master author: Sandro Mani committer: GitHub date: 2020-06-08T17:28:11+02:00 summary: bpo-40854: Allow overriding sys.platlibdir via PYTHONPLATLIBDIR env-var (GH-20605) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-03-13-53-24.bpo-40854.O6vfQU.rst M Doc/c-api/init_config.rst M Doc/using/cmdline.rst M Include/cpython/initconfig.h M Lib/test/test_embed.py M Makefile.pre.in M Misc/python.man M Modules/getpath.c M Programs/_testembed.c M Python/initconfig.c M Python/sysmodule.c diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index fc82c3eb59024..7b8e894fe22dd 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -436,6 +436,14 @@ PyConfig :data:`sys.base_prefix`. + .. c:member:: wchar_t* platlibdir + + :data:`sys.platlibdir`: platform library directory name, set at configure time + by ``--with-platlibdir``, overrideable by the ``PYTHONPLATLIBDIR`` + environment variable. + + .. versionadded:: 3.10 + .. c:member:: int buffered_stdio If equals to 0, enable unbuffered mode, making the stdout and stderr @@ -884,6 +892,7 @@ Path Configuration * Path configuration inputs: * :c:member:`PyConfig.home` + * :c:member:`PyConfig.platlibdir` * :c:member:`PyConfig.pathconfig_warnings` * :c:member:`PyConfig.program_name` * :c:member:`PyConfig.pythonpath_env` diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index b0911956a9eb8..3e0797279d6bf 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -538,6 +538,14 @@ conflict. within a Python program as the variable :data:`sys.path`. +.. envvar:: PYTHONPLATLIBDIR + + If this is set to a non-empty string, it overrides the :data:`sys.platlibdir` + value. + + .. versionadded:: 3.10 + + .. 
envvar:: PYTHONSTARTUP If this is the name of a readable file, the Python commands in that file are diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index e9c2e6bec3861..563c2bacfa428 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -385,6 +385,7 @@ typedef struct { wchar_t *base_prefix; /* sys.base_prefix */ wchar_t *exec_prefix; /* sys.exec_prefix */ wchar_t *base_exec_prefix; /* sys.base_exec_prefix */ + wchar_t *platlibdir; /* sys.platlibdir */ /* --- Parameter only used by Py_Main() ---------- */ diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 3d60b2f330c62..f1371db866924 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -380,6 +380,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'exec_prefix': GET_DEFAULT_CONFIG, 'base_exec_prefix': GET_DEFAULT_CONFIG, 'module_search_paths': GET_DEFAULT_CONFIG, + 'platlibdir': sys.platlibdir, 'site_import': 1, 'bytes_warning': 0, @@ -585,13 +586,14 @@ def get_expected_config(self, expected_preconfig, expected, env, api, if value is self.GET_DEFAULT_CONFIG: expected[key] = config[key] - pythonpath_env = expected['pythonpath_env'] - if pythonpath_env is not None: - paths = pythonpath_env.split(os.path.pathsep) - expected['module_search_paths'] = [*paths, *expected['module_search_paths']] - if modify_path_cb is not None: - expected['module_search_paths'] = expected['module_search_paths'].copy() - modify_path_cb(expected['module_search_paths']) + if expected['module_search_paths'] is not self.IGNORE_CONFIG: + pythonpath_env = expected['pythonpath_env'] + if pythonpath_env is not None: + paths = pythonpath_env.split(os.path.pathsep) + expected['module_search_paths'] = [*paths, *expected['module_search_paths']] + if modify_path_cb is not None: + expected['module_search_paths'] = expected['module_search_paths'].copy() + modify_path_cb(expected['module_search_paths']) for key in self.COPY_PRE_CONFIG: if key not in expected_preconfig: @@ -764,6 +766,8 @@ def test_init_from_config(self): 'buffered_stdio': 0, 'user_site_directory': 0, 'faulthandler': 1, + 'platlibdir': 'my_platlibdir', + 'module_search_paths': self.IGNORE_CONFIG, 'check_hash_pycs_mode': 'always', 'pathconfig_warnings': 0, @@ -795,6 +799,8 @@ def test_init_compat_env(self): 'user_site_directory': 0, 'faulthandler': 1, 'warnoptions': ['EnvVar'], + 'platlibdir': 'env_platlibdir', + 'module_search_paths': self.IGNORE_CONFIG, '_use_peg_parser': 0, } self.check_all_configs("test_init_compat_env", config, preconfig, @@ -823,6 +829,8 @@ def test_init_python_env(self): 'user_site_directory': 0, 'faulthandler': 1, 'warnoptions': ['EnvVar'], + 'platlibdir': 'env_platlibdir', + 'module_search_paths': self.IGNORE_CONFIG, '_use_peg_parser': 0, } self.check_all_configs("test_init_python_env", config, preconfig, diff --git a/Makefile.pre.in b/Makefile.pre.in index b115e7fc01f74..9cb7a23eea582 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -811,6 +811,11 @@ Python/sysmodule.o: $(srcdir)/Python/sysmodule.c Makefile $(srcdir)/Include/pydt $(MULTIARCH_CPPFLAGS) \ -o $@ $(srcdir)/Python/sysmodule.c +Python/initconfig.o: $(srcdir)/Python/initconfig.c + $(CC) -c $(PY_CORE_CFLAGS) \ + -DPLATLIBDIR='"$(PLATLIBDIR)"' \ + -o $@ $(srcdir)/Python/initconfig.c + $(IO_OBJS): $(IO_H) .PHONY: regen-grammar diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-03-13-53-24.bpo-40854.O6vfQU.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-03-13-53-24.bpo-40854.O6vfQU.rst new file mode 100644 index 
0000000000000..6ef4ed5af7318 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-03-13-53-24.bpo-40854.O6vfQU.rst @@ -0,0 +1 @@ +Allow overriding :data:`sys.platlibdir` via a new :envvar:`PYTHONPLATLIBDIR` environment variable. diff --git a/Misc/python.man b/Misc/python.man index 89a15a5e7b2ff..74b2d72939eeb 100644 --- a/Misc/python.man +++ b/Misc/python.man @@ -413,6 +413,8 @@ inserted in the path in front of $PYTHONPATH. The search path can be manipulated from within a Python program as the variable .IR sys.path . +.IP PYTHONPLATLIBDIR +Override sys.platlibdir. .IP PYTHONSTARTUP If this is the name of a readable file, the Python commands in that file are executed before the first prompt is displayed in interactive diff --git a/Modules/getpath.c b/Modules/getpath.c index d9829f8ad3dbd..469c9ca010640 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -105,8 +105,8 @@ extern "C" { #if (!defined(PREFIX) || !defined(EXEC_PREFIX) \ - || !defined(VERSION) || !defined(VPATH) || !defined(PLATLIBDIR)) -#error "PREFIX, EXEC_PREFIX, VERSION, VPATH and PLATLIBDIR macros must be defined" + || !defined(VERSION) || !defined(VPATH)) +#error "PREFIX, EXEC_PREFIX, VERSION and VPATH macros must be defined" #endif #ifndef LANDMARK @@ -128,7 +128,6 @@ typedef struct { wchar_t *pythonpath_macro; /* PYTHONPATH macro */ wchar_t *prefix_macro; /* PREFIX macro */ wchar_t *exec_prefix_macro; /* EXEC_PREFIX macro */ - wchar_t *platlibdir_macro; /* PLATLIBDIR macro */ wchar_t *vpath_macro; /* VPATH macro */ wchar_t *lib_python; /* "lib/pythonX.Y" */ @@ -138,6 +137,7 @@ typedef struct { int warnings; const wchar_t *pythonpath_env; + const wchar_t *platlibdir; wchar_t *argv0_path; wchar_t *zip_path; @@ -811,7 +811,7 @@ calculate_exec_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig) } /* / "lib-dynload" */ - wchar_t *lib_dynload = joinpath2(calculate->platlibdir_macro, + wchar_t *lib_dynload = joinpath2(calculate->platlibdir, L"lib-dynload"); if (lib_dynload == NULL) { return _PyStatus_NO_MEMORY(); @@ -1297,7 +1297,7 @@ calculate_zip_path(PyCalculatePath *calculate) PyStatus res; /* Path: / "pythonXY.zip" */ - wchar_t *path = joinpath2(calculate->platlibdir_macro, L"python" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) L".zip"); + wchar_t *path = joinpath2(calculate->platlibdir, L"python" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) L".zip"); if (path == NULL) { return _PyStatus_NO_MEMORY(); } @@ -1451,10 +1451,6 @@ calculate_init(PyCalculatePath *calculate, const PyConfig *config) if (!calculate->vpath_macro) { return DECODE_LOCALE_ERR("VPATH macro", len); } - calculate->platlibdir_macro = Py_DecodeLocale(PLATLIBDIR, &len); - if (!calculate->platlibdir_macro) { - return DECODE_LOCALE_ERR("PLATLIBDIR macro", len); - } calculate->lib_python = Py_DecodeLocale(PLATLIBDIR "/python" VERSION, &len); if (!calculate->lib_python) { @@ -1463,6 +1459,7 @@ calculate_init(PyCalculatePath *calculate, const PyConfig *config) calculate->warnings = config->pathconfig_warnings; calculate->pythonpath_env = config->pythonpath_env; + calculate->platlibdir = config->platlibdir; return _PyStatus_OK(); } @@ -1475,7 +1472,6 @@ calculate_free(PyCalculatePath *calculate) PyMem_RawFree(calculate->prefix_macro); PyMem_RawFree(calculate->exec_prefix_macro); PyMem_RawFree(calculate->vpath_macro); - PyMem_RawFree(calculate->platlibdir_macro); PyMem_RawFree(calculate->lib_python); PyMem_RawFree(calculate->path_env); PyMem_RawFree(calculate->zip_path); diff --git a/Programs/_testembed.c 
b/Programs/_testembed.c index 5c83678f650d0..11524dfbc0d58 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -548,6 +548,13 @@ static int test_init_from_config(void) /* FIXME: test home */ /* FIXME: test path config: module_search_path .. dll_path */ + putenv("PYTHONPLATLIBDIR=env_platlibdir"); + status = PyConfig_SetBytesString(&config, &config.platlibdir, "my_platlibdir"); + if (PyStatus_Exception(status)) { + PyConfig_Clear(&config); + Py_ExitStatusException(status); + } + putenv("PYTHONVERBOSE=0"); Py_VerboseFlag = 0; config.verbose = 1; @@ -668,6 +675,7 @@ static void set_most_env_vars(void) putenv("PYTHONFAULTHANDLER=1"); putenv("PYTHONIOENCODING=iso8859-1:replace"); putenv("PYTHONOLDPARSER=1"); + putenv("PYTHONPLATLIBDIR=env_platlibdir"); } diff --git a/Python/initconfig.c b/Python/initconfig.c index 185935c05fb28..834b8ed943023 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -24,6 +24,10 @@ # endif #endif +#ifndef PLATLIBDIR +# error "PLATLIBDIR macro must be defined" +#endif + /* --- Command line options --------------------------------------- */ @@ -110,6 +114,7 @@ PYTHONPATH : '%lc'-separated list of directories prefixed to the\n\ static const char usage_5[] = "PYTHONHOME : alternate directory (or %lc).\n" " The default module search path uses %s.\n" +"PYTHONPLATLIBDIR : override sys.platlibdir.\n" "PYTHONCASEOK : ignore case in 'import' statements (Windows).\n" "PYTHONUTF8: if set to 1, enable the UTF-8 mode.\n" "PYTHONIOENCODING: Encoding[:errors] used for stdin/stdout/stderr.\n" @@ -588,6 +593,7 @@ PyConfig_Clear(PyConfig *config) CLEAR(config->base_prefix); CLEAR(config->exec_prefix); CLEAR(config->base_exec_prefix); + CLEAR(config->platlibdir); CLEAR(config->filesystem_encoding); CLEAR(config->filesystem_errors); @@ -824,6 +830,7 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_WSTR_ATTR(base_prefix); COPY_WSTR_ATTR(exec_prefix); COPY_WSTR_ATTR(base_exec_prefix); + COPY_WSTR_ATTR(platlibdir); COPY_ATTR(site_import); COPY_ATTR(bytes_warning); @@ -926,6 +933,7 @@ config_as_dict(const PyConfig *config) SET_ITEM_WSTR(base_prefix); SET_ITEM_WSTR(exec_prefix); SET_ITEM_WSTR(base_exec_prefix); + SET_ITEM_WSTR(platlibdir); SET_ITEM_INT(site_import); SET_ITEM_INT(bytes_warning); SET_ITEM_INT(inspect); @@ -1336,6 +1344,14 @@ config_read_env_vars(PyConfig *config) } } + if(config->platlibdir == NULL) { + status = CONFIG_GET_ENV_DUP(config, &config->platlibdir, + L"PYTHONPLATLIBDIR", "PYTHONPLATLIBDIR"); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + } + if (config->use_hash_seed < 0) { status = config_init_hash_seed(config); if (_PyStatus_EXCEPTION(status)) { @@ -1731,6 +1747,14 @@ config_read(PyConfig *config) } } + if(config->platlibdir == NULL) { + status = CONFIG_SET_BYTES_STR(config, &config->platlibdir, PLATLIBDIR, + "PLATLIBDIR macro"); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + } + if (config->_install_importlib) { status = _PyConfig_InitPathConfig(config); if (_PyStatus_EXCEPTION(status)) { @@ -2554,6 +2578,7 @@ PyConfig_Read(PyConfig *config) assert(config->exec_prefix != NULL); assert(config->base_exec_prefix != NULL); } + assert(config->platlibdir != NULL); assert(config->filesystem_encoding != NULL); assert(config->filesystem_errors != NULL); assert(config->stdio_encoding != NULL); @@ -2704,6 +2729,7 @@ _Py_DumpPathConfig(PyThreadState *tstate) DUMP_SYS(_base_executable); DUMP_SYS(base_prefix); DUMP_SYS(base_exec_prefix); + DUMP_SYS(platlibdir); DUMP_SYS(executable); DUMP_SYS(prefix); 
DUMP_SYS(exec_prefix); diff --git a/Python/sysmodule.c b/Python/sysmodule.c index e3fe1436145b4..3e4115fe8e1f9 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -2922,13 +2922,7 @@ _PySys_InitMain(PyThreadState *tstate) SET_SYS_FROM_WSTR("base_prefix", config->base_prefix); SET_SYS_FROM_WSTR("exec_prefix", config->exec_prefix); SET_SYS_FROM_WSTR("base_exec_prefix", config->base_exec_prefix); - { - PyObject *str = PyUnicode_FromString(PLATLIBDIR); - if (str == NULL) { - return -1; - } - SET_SYS_FROM_STRING("platlibdir", str); - } + SET_SYS_FROM_WSTR("platlibdir", config->platlibdir); if (config->pycache_prefix != NULL) { SET_SYS_FROM_WSTR("pycache_prefix", config->pycache_prefix); From webhook-mailer at python.org Mon Jun 8 12:13:08 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Jun 2020 16:13:08 -0000 Subject: [Python-checkins] bpo-40910: Export Py_GetArgcArgv() function (GH-20721) Message-ID: https://github.com/python/cpython/commit/e81f6e687d0f04a45f2389d0b43fafd6d8491624 commit: e81f6e687d0f04a45f2389d0b43fafd6d8491624 branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T18:12:59+02:00 summary: bpo-40910: Export Py_GetArgcArgv() function (GH-20721) Export explicitly the Py_GetArgcArgv() function to the C API and document the function. Previously, it was exported implicitly which no longer works since Python is built with -fvisibility=hidden. * Add PyConfig._orig_argv member. * Py_InitializeFromConfig() no longer calls _PyConfig_Write() twice. * PyConfig_Read() no longer initializes Py_GetArgcArgv(): it is now _PyConfig_Write() responsibility. * _PyConfig_Write() result type becomes PyStatus instead of void. * Write an unit test on Py_GetArgcArgv(). files: A Misc/NEWS.d/next/C API/2020-06-08-15-59-06.bpo-40910.L56oI0.rst M Doc/c-api/init_config.rst M Include/cpython/initconfig.h M Include/internal/pycore_initconfig.h M Lib/test/test_embed.py M PC/python3.def M Programs/_testembed.c M Python/bootstrap_hash.c M Python/initconfig.c M Python/pylifecycle.c diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 7b8e894fe22dd..c51b157bbb33e 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -43,6 +43,7 @@ Functions: * :c:func:`Py_PreInitializeFromArgs` * :c:func:`Py_PreInitializeFromBytesArgs` * :c:func:`Py_RunMain` +* :c:func:`Py_GetArgcArgv` The preconfiguration (``PyPreConfig`` type) is stored in ``_PyRuntime.preconfig`` and the configuration (``PyConfig`` type) is stored in @@ -984,6 +985,14 @@ customized Python always running in isolated mode using :c:func:`Py_RunMain`. +Py_GetArgcArgv() +---------------- + +.. c:function:: void Py_GetArgcArgv(int *argc, wchar_t ***argv) + + Get the original command line arguments, before Python modified them. + + Multi-Phase Initialization Private Provisional API -------------------------------------------------- diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 563c2bacfa428..57933211bb937 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -411,6 +411,14 @@ typedef struct { /* If non-zero, disallow threads, subprocesses, and fork. Default: 0. */ int _isolated_interpreter; + + /* Original command line arguments. If _orig_argv is empty and _argv is + not equal to [''], PyConfig_Read() copies the configuration 'argv' list + into '_orig_argv' list before modifying 'argv' list (if parse_argv + is non-zero). + + _PyConfig_Write() initializes Py_GetArgcArgv() to this list. 
*/ + PyWideStringList _orig_argv; } PyConfig; PyAPI_FUNC(void) PyConfig_InitPythonConfig(PyConfig *config); @@ -436,5 +444,13 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, PyWideStringList *list, Py_ssize_t length, wchar_t **items); + +/* --- Helper functions --------------------------------------- */ + +/* Get the original command line arguments, before Python modified them. + + See also PyConfig._orig_argv. */ +PyAPI_FUNC(void) Py_GetArgcArgv(int *argc, wchar_t ***argv); + #endif /* !Py_LIMITED_API */ #endif /* !Py_PYCORECONFIG_H */ diff --git a/Include/internal/pycore_initconfig.h b/Include/internal/pycore_initconfig.h index 8c6706c95cbd1..457a005860b20 100644 --- a/Include/internal/pycore_initconfig.h +++ b/Include/internal/pycore_initconfig.h @@ -150,7 +150,7 @@ extern PyStatus _PyConfig_Copy( PyConfig *config, const PyConfig *config2); extern PyStatus _PyConfig_InitPathConfig(PyConfig *config); -extern void _PyConfig_Write(const PyConfig *config, +extern PyStatus _PyConfig_Write(const PyConfig *config, struct pyruntimestate *runtime); extern PyStatus _PyConfig_SetPyArgv( PyConfig *config, diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index f1371db866924..b7b70589da52b 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -366,6 +366,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'program_name': GET_DEFAULT_CONFIG, 'parse_argv': 0, 'argv': [""], + '_orig_argv': [], 'xoptions': [], 'warnoptions': [], @@ -739,7 +740,12 @@ def test_init_from_config(self): 'pycache_prefix': 'conf_pycache_prefix', 'program_name': './conf_program_name', - 'argv': ['-c', 'arg2', ], + 'argv': ['-c', 'arg2'], + '_orig_argv': ['python3', + '-W', 'cmdline_warnoption', + '-X', 'cmdline_xoption', + '-c', 'pass', + 'arg2'], 'parse_argv': 1, 'xoptions': [ 'config_xoption1=3', @@ -872,6 +878,7 @@ def test_preinit_parse_argv(self): } config = { 'argv': ['script.py'], + '_orig_argv': ['python3', '-X', 'dev', 'script.py'], 'run_filename': os.path.abspath('script.py'), 'dev_mode': 1, 'faulthandler': 1, @@ -886,9 +893,14 @@ def test_preinit_dont_parse_argv(self): preconfig = { 'isolated': 0, } + argv = ["python3", + "-E", "-I", + "-X", "dev", + "-X", "utf8", + "script.py"] config = { - 'argv': ["python3", "-E", "-I", - "-X", "dev", "-X", "utf8", "script.py"], + 'argv': argv, + '_orig_argv': argv, 'isolated': 0, } self.check_all_configs("test_preinit_dont_parse_argv", config, preconfig, @@ -967,6 +979,9 @@ def test_init_sys_add(self): 'ignore:::sysadd_warnoption', 'ignore:::config_warnoption', ], + '_orig_argv': ['python3', + '-W', 'ignore:::cmdline_warnoption', + '-X', 'cmdline_xoption'], } self.check_all_configs("test_init_sys_add", config, api=API_PYTHON) @@ -975,6 +990,7 @@ def test_init_run_main(self): 'print(json.dumps(_testinternalcapi.get_configs()))') config = { 'argv': ['-c', 'arg2'], + '_orig_argv': ['python3', '-c', code, 'arg2'], 'program_name': './python3', 'run_command': code + '\n', 'parse_argv': 1, @@ -986,6 +1002,9 @@ def test_init_main(self): 'print(json.dumps(_testinternalcapi.get_configs()))') config = { 'argv': ['-c', 'arg2'], + '_orig_argv': ['python3', + '-c', code, + 'arg2'], 'program_name': './python3', 'run_command': code + '\n', 'parse_argv': 1, @@ -999,6 +1018,7 @@ def test_init_parse_argv(self): config = { 'parse_argv': 1, 'argv': ['-c', 'arg1', '-v', 'arg3'], + '_orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], 'program_name': './argv0', 'run_command': 'pass\n', 'use_environment': 0, @@ -1012,6 +1032,7 
@@ def test_init_dont_parse_argv(self): config = { 'parse_argv': 0, 'argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], + '_orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], 'program_name': './argv0', } self.check_all_configs("test_init_dont_parse_argv", config, pre_config, @@ -1299,10 +1320,17 @@ def test_init_warnoptions(self): 'faulthandler': 1, 'bytes_warning': 1, 'warnoptions': warnoptions, + '_orig_argv': ['python3', + '-Wignore:::cmdline1', + '-Wignore:::cmdline2'], } self.check_all_configs("test_init_warnoptions", config, preconfig, api=API_PYTHON) + def test_get_argc_argv(self): + self.run_embedded_interpreter("test_get_argc_argv") + # ignore output + class AuditingTests(EmbeddingTestsMixin, unittest.TestCase): def test_open_code_hook(self): diff --git a/Misc/NEWS.d/next/C API/2020-06-08-15-59-06.bpo-40910.L56oI0.rst b/Misc/NEWS.d/next/C API/2020-06-08-15-59-06.bpo-40910.L56oI0.rst new file mode 100644 index 0000000000000..1d0cb0b0235bf --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-08-15-59-06.bpo-40910.L56oI0.rst @@ -0,0 +1,3 @@ +Export explicitly the :c:func:`Py_GetArgcArgv` function to the C API and +document the function. Previously, it was exported implicitly which no +longer works since Python is built with ``-fvisibility=hidden``. diff --git a/PC/python3.def b/PC/python3.def index 6d54d4eaf71f0..2a6aaf4331ea5 100644 --- a/PC/python3.def +++ b/PC/python3.def @@ -734,6 +734,7 @@ EXPORTS Py_FinalizeEx=python310.Py_FinalizeEx Py_GenericAlias=python310.Py_GenericAlias Py_GenericAliasType=python310.Py_GenericAliasType + Py_GetArgcArgv=python310.Py_GetArgcArgv Py_GetBuildInfo=python310.Py_GetBuildInfo Py_GetCompiler=python310.Py_GetCompiler Py_GetCopyright=python310.Py_GetCopyright diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 11524dfbc0d58..d89f6be6570e3 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -1334,6 +1334,7 @@ static int test_init_read_set(void) return 0; fail: + PyConfig_Clear(&config); Py_ExitStatusException(status); } @@ -1592,6 +1593,46 @@ static int test_run_main(void) } +static int test_get_argc_argv(void) +{ + PyConfig config; + PyConfig_InitPythonConfig(&config); + + wchar_t *argv[] = {L"python3", L"-c", + (L"import sys; " + L"print(f'Py_RunMain(): sys.argv={sys.argv}')"), + L"arg2"}; + config_set_argv(&config, Py_ARRAY_LENGTH(argv), argv); + config_set_string(&config, &config.program_name, L"./python3"); + + // Calling PyConfig_Read() twice must not change Py_GetArgcArgv() result. + // The second call is done by Py_InitializeFromConfig(). + PyStatus status = PyConfig_Read(&config); + if (PyStatus_Exception(status)) { + PyConfig_Clear(&config); + Py_ExitStatusException(status); + } + + init_from_config_clear(&config); + + int get_argc; + wchar_t **get_argv; + Py_GetArgcArgv(&get_argc, &get_argv); + printf("argc: %i\n", get_argc); + assert(get_argc == Py_ARRAY_LENGTH(argv)); + for (int i=0; i < get_argc; i++) { + printf("argv[%i]: %ls\n", i, get_argv[i]); + assert(wcscmp(get_argv[i], argv[i]) == 0); + } + + Py_Finalize(); + + printf("\n"); + printf("test ok\n"); + return 0; +} + + /* ********************************************************* * List of test cases and the function that implements it. 
* @@ -1649,6 +1690,7 @@ static struct TestCase TestCases[] = { {"test_init_setpythonhome", test_init_setpythonhome}, {"test_init_warnoptions", test_init_warnoptions}, {"test_run_main", test_run_main}, + {"test_get_argc_argv", test_get_argc_argv}, {"test_open_code_hook", test_open_code_hook}, {"test_audit", test_audit}, diff --git a/Python/bootstrap_hash.c b/Python/bootstrap_hash.c index b2109275014b2..47369305ee88e 100644 --- a/Python/bootstrap_hash.c +++ b/Python/bootstrap_hash.c @@ -580,7 +580,7 @@ _Py_HashRandomization_Init(const PyConfig *config) res = pyurandom(secret, secret_size, 0, 0); if (res < 0) { return _PyStatus_ERR("failed to get random numbers " - "to initialize Python"); + "to initialize Python"); } } return _PyStatus_OK(); diff --git a/Python/initconfig.c b/Python/initconfig.c index 834b8ed943023..998ceb7bbfa51 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -548,8 +548,6 @@ _Py_SetArgcArgv(Py_ssize_t argc, wchar_t * const *argv) } -/* Make the *original* argc/argv available to other modules. - This is rare, but it is needed by the secureware extension. */ void Py_GetArgcArgv(int *argc, wchar_t ***argv) { @@ -859,6 +857,7 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_ATTR(pathconfig_warnings); COPY_ATTR(_init_main); COPY_ATTR(_isolated_interpreter); + COPY_WSTRLIST(_orig_argv); #undef COPY_ATTR #undef COPY_WSTR_ATTR @@ -960,6 +959,7 @@ config_as_dict(const PyConfig *config) SET_ITEM_INT(pathconfig_warnings); SET_ITEM_INT(_init_main); SET_ITEM_INT(_isolated_interpreter); + SET_ITEM_WSTRLIST(_orig_argv); return dict; @@ -1856,7 +1856,7 @@ config_init_stdio(const PyConfig *config) - set Py_xxx global configuration variables - initialize C standard streams (stdin, stdout, stderr) */ -void +PyStatus _PyConfig_Write(const PyConfig *config, _PyRuntimeState *runtime) { config_set_global_vars(config); @@ -1870,6 +1870,13 @@ _PyConfig_Write(const PyConfig *config, _PyRuntimeState *runtime) preconfig->isolated = config->isolated; preconfig->use_environment = config->use_environment; preconfig->dev_mode = config->dev_mode; + + if (_Py_SetArgcArgv(config->_orig_argv.length, + config->_orig_argv.items) < 0) + { + return _PyStatus_NO_MEMORY(); + } + return _PyStatus_OK(); } @@ -2493,7 +2500,6 @@ PyStatus PyConfig_Read(PyConfig *config) { PyStatus status; - PyWideStringList orig_argv = _PyWideStringList_INIT; status = _Py_PreInitializeFromConfig(config, NULL); if (_PyStatus_EXCEPTION(status)) { @@ -2502,8 +2508,13 @@ PyConfig_Read(PyConfig *config) config_get_global_vars(config); - if (_PyWideStringList_Copy(&orig_argv, &config->argv) < 0) { - return _PyStatus_NO_MEMORY(); + if (config->_orig_argv.length == 0 + && !(config->argv.length == 1 + && wcscmp(config->argv.items[0], L"") == 0)) + { + if (_PyWideStringList_Copy(&config->_orig_argv, &config->argv) < 0) { + return _PyStatus_NO_MEMORY(); + } } _PyPreCmdline precmdline = _PyPreCmdline_INIT; @@ -2534,11 +2545,6 @@ PyConfig_Read(PyConfig *config) goto done; } - if (_Py_SetArgcArgv(orig_argv.length, orig_argv.items) < 0) { - status = _PyStatus_NO_MEMORY(); - goto done; - } - /* Check config consistency */ assert(config->isolated >= 0); assert(config->use_environment >= 0); @@ -2591,11 +2597,11 @@ PyConfig_Read(PyConfig *config) assert(config->check_hash_pycs_mode != NULL); assert(config->_install_importlib >= 0); assert(config->pathconfig_warnings >= 0); + assert(_PyWideStringList_CheckConsistency(&config->_orig_argv)); status = _PyStatus_OK(); done: - _PyWideStringList_Clear(&orig_argv); 
_PyPreCmdline_Clear(&precmdline); return status; } diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index d730a98d3e5b9..f2f7d585c8000 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -460,7 +460,10 @@ pyinit_core_reconfigure(_PyRuntimeState *runtime, return _PyStatus_ERR("can't make main interpreter"); } - _PyConfig_Write(config, runtime); + status = _PyConfig_Write(config, runtime); + if (_PyStatus_EXCEPTION(status)) { + return status; + } status = _PyInterpreterState_SetConfig(interp, config); if (_PyStatus_EXCEPTION(status)) { @@ -486,7 +489,10 @@ pycore_init_runtime(_PyRuntimeState *runtime, return _PyStatus_ERR("main interpreter already initialized"); } - _PyConfig_Write(config, runtime); + PyStatus status = _PyConfig_Write(config, runtime); + if (_PyStatus_EXCEPTION(status)) { + return status; + } /* Py_Finalize leaves _Py_Finalizing set in order to help daemon * threads behave a little more gracefully at interpreter shutdown. @@ -499,7 +505,7 @@ pycore_init_runtime(_PyRuntimeState *runtime, */ _PyRuntimeState_SetFinalizing(runtime, NULL); - PyStatus status = _Py_HashRandomization_Init(config); + status = _Py_HashRandomization_Init(config); if (_PyStatus_EXCEPTION(status)) { return status; } @@ -746,8 +752,6 @@ pyinit_config(_PyRuntimeState *runtime, PyThreadState **tstate_p, const PyConfig *config) { - _PyConfig_Write(config, runtime); - PyStatus status = pycore_init_runtime(runtime, config); if (_PyStatus_EXCEPTION(status)) { return status; From webhook-mailer at python.org Mon Jun 8 12:48:47 2020 From: webhook-mailer at python.org (Steve Dower) Date: Mon, 08 Jun 2020 16:48:47 -0000 Subject: [Python-checkins] bpo-40861: Enable optimizations when building liblzma (GH-20724) Message-ID: https://github.com/python/cpython/commit/3a3a30c5a4622e18be9f7e4a239dc9e0d7c8054c commit: 3a3a30c5a4622e18be9f7e4a239dc9e0d7c8054c branch: master author: Steve Dower committer: GitHub date: 2020-06-08T17:48:43+01:00 summary: bpo-40861: Enable optimizations when building liblzma (GH-20724) files: M PCbuild/liblzma.vcxproj diff --git a/PCbuild/liblzma.vcxproj b/PCbuild/liblzma.vcxproj index 9ec062e5255f0..a6bd59ec0baa3 100644 --- a/PCbuild/liblzma.vcxproj +++ b/PCbuild/liblzma.vcxproj @@ -91,11 +91,8 @@ - WIN32;HAVE_CONFIG_H;_DEBUG;_LIB;%(PreprocessorDefinitions) - Level3 - ProgramDatabase - Disabled - $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple + WIN32;HAVE_CONFIG_H;_LIB;%(PreprocessorDefinitions) + $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple;%(AdditionalIncludeDirectories) 4028;4113;4133;4244;4267;4996;%(DisableSpecificWarnings) From webhook-mailer at python.org Mon Jun 8 13:06:41 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 17:06:41 -0000 Subject: [Python-checkins] bpo-40861: Enable optimizations when building liblzma (GH-20724) Message-ID: https://github.com/python/cpython/commit/62e7f9ab55a6426708d5316da6f07d3fe220b53a commit: 62e7f9ab55a6426708d5316da6f07d3fe220b53a branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 
2020-06-08T10:06:31-07:00 summary: bpo-40861: Enable optimizations when building liblzma (GH-20724) (cherry picked from commit 3a3a30c5a4622e18be9f7e4a239dc9e0d7c8054c) Co-authored-by: Steve Dower files: M PCbuild/liblzma.vcxproj diff --git a/PCbuild/liblzma.vcxproj b/PCbuild/liblzma.vcxproj index f408b5478558c..f7f3dba554b9e 100644 --- a/PCbuild/liblzma.vcxproj +++ b/PCbuild/liblzma.vcxproj @@ -59,11 +59,8 @@ - WIN32;HAVE_CONFIG_H;_DEBUG;_LIB;%(PreprocessorDefinitions) - Level3 - ProgramDatabase - Disabled - $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple + WIN32;HAVE_CONFIG_H;_LIB;%(PreprocessorDefinitions) + $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple;%(AdditionalIncludeDirectories) 4028;4113;4133;4244;4267;4996;%(DisableSpecificWarnings) From webhook-mailer at python.org Mon Jun 8 13:07:36 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 17:07:36 -0000 Subject: [Python-checkins] bpo-40861: Enable optimizations when building liblzma (GH-20724) Message-ID: https://github.com/python/cpython/commit/30513b627777b936e3df8e4b6dd4d6b280a6b765 commit: 30513b627777b936e3df8e4b6dd4d6b280a6b765 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-08T10:07:27-07:00 summary: bpo-40861: Enable optimizations when building liblzma (GH-20724) (cherry picked from commit 3a3a30c5a4622e18be9f7e4a239dc9e0d7c8054c) Co-authored-by: Steve Dower files: M PCbuild/liblzma.vcxproj diff --git a/PCbuild/liblzma.vcxproj b/PCbuild/liblzma.vcxproj index 9ec062e5255f0..a6bd59ec0baa3 100644 --- a/PCbuild/liblzma.vcxproj +++ b/PCbuild/liblzma.vcxproj @@ -91,11 +91,8 @@ - WIN32;HAVE_CONFIG_H;_DEBUG;_LIB;%(PreprocessorDefinitions) - Level3 - ProgramDatabase - Disabled - $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple + WIN32;HAVE_CONFIG_H;_LIB;%(PreprocessorDefinitions) + $(lzmaDir)windows;$(lzmaDir)src/liblzma/common;$(lzmaDir)src/common;$(lzmaDir)src/liblzma/api;$(lzmaDir)src/liblzma/check;$(lzmaDir)src/liblzma/delta;$(lzmaDir)src/liblzma/lz;$(lzmaDir)src/liblzma/lzma;$(lzmaDir)src/liblzma/rangecoder;$(lzmaDir)src/liblzma/simple;%(AdditionalIncludeDirectories) 4028;4113;4133;4244;4267;4996;%(DisableSpecificWarnings) From webhook-mailer at python.org Mon Jun 8 13:31:36 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 17:31:36 -0000 Subject: [Python-checkins] Remove deleted libmpdec header from the Visual Studio build machinery. (GH-20730) Message-ID: https://github.com/python/cpython/commit/0c59f440f4c9dca658e6b18db14b67b750e25a87 commit: 0c59f440f4c9dca658e6b18db14b67b750e25a87 branch: master author: Stefan Krah committer: GitHub date: 2020-06-08T19:31:29+02:00 summary: Remove deleted libmpdec header from the Visual Studio build machinery. 
(GH-20730) files: M PCbuild/_decimal.vcxproj M PCbuild/_decimal.vcxproj.filters diff --git a/PCbuild/_decimal.vcxproj b/PCbuild/_decimal.vcxproj index f0f387f3bfaa5..4c71cdb6d1d77 100644 --- a/PCbuild/_decimal.vcxproj +++ b/PCbuild/_decimal.vcxproj @@ -118,7 +118,6 @@ - diff --git a/PCbuild/_decimal.vcxproj.filters b/PCbuild/_decimal.vcxproj.filters index 1aa9d020d672f..5f7de3d85381e 100644 --- a/PCbuild/_decimal.vcxproj.filters +++ b/PCbuild/_decimal.vcxproj.filters @@ -57,9 +57,6 @@ Header Files - - Header Files - @@ -113,4 +110,4 @@ Source Files - \ No newline at end of file + From webhook-mailer at python.org Mon Jun 8 13:33:17 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 17:33:17 -0000 Subject: [Python-checkins] Add multicore support to deccheck.py. (GH-20731) Message-ID: https://github.com/python/cpython/commit/951d680d56d8c32556437a86f6b42f221635b97f commit: 951d680d56d8c32556437a86f6b42f221635b97f branch: master author: Stefan Krah committer: GitHub date: 2020-06-08T19:33:12+02:00 summary: Add multicore support to deccheck.py. (GH-20731) files: M Modules/_decimal/tests/deccheck.py diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py index 5cd5db5711426..5d9179e61689d 100644 --- a/Modules/_decimal/tests/deccheck.py +++ b/Modules/_decimal/tests/deccheck.py @@ -29,9 +29,20 @@ # Usage: python deccheck.py [--short|--medium|--long|--all] # -import sys, random + +import sys +import os +import time +import random from copy import copy from collections import defaultdict + +import argparse +import subprocess +from subprocess import PIPE, STDOUT +from queue import Queue, Empty +from threading import Thread, Event, Lock + from test.support import import_fresh_module from randdec import randfloat, all_unary, all_binary, all_ternary from randdec import unary_optarg, binary_optarg, ternary_optarg @@ -1124,18 +1135,35 @@ def check_untested(funcdict, c_cls, p_cls): funcdict['untested'] = tuple(sorted(intersect-tested)) - #for key in ('untested', 'c_only', 'p_only'): - # s = 'Context' if c_cls == C.Context else 'Decimal' - # print("\n%s %s:\n%s" % (s, key, funcdict[key])) + # for key in ('untested', 'c_only', 'p_only'): + # s = 'Context' if c_cls == C.Context else 'Decimal' + # print("\n%s %s:\n%s" % (s, key, funcdict[key])) if __name__ == '__main__': - import time + parser = argparse.ArgumentParser(prog="deccheck.py") + + group = parser.add_mutually_exclusive_group() + group.add_argument('--short', dest='time', action="store_const", const='short', default='short', help="short test (default)") + group.add_argument('--medium', dest='time', action="store_const", const='medium', default='short', help="medium test (reasonable run time)") + group.add_argument('--long', dest='time', action="store_const", const='long', default='short', help="long test (long run time)") + group.add_argument('--all', dest='time', action="store_const", const='all', default='short', help="all tests (excessive run time)") + + group = parser.add_mutually_exclusive_group() + group.add_argument('--single', dest='single', nargs=1, default=False, metavar="TEST", help="run a single test") + group.add_argument('--multicore', dest='multicore', action="store_true", default=False, help="use all available cores") + + args = parser.parse_args() + assert args.single is False or args.multicore is False + if args.single: + args.single = args.single[0] + randseed = int(time.time()) random.seed(randseed) + # Set up the testspecs list. 
A testspec is simply a dictionary # that determines the amount of different contexts that 'test_method' # will generate. @@ -1168,17 +1196,17 @@ def check_untested(funcdict, c_cls, p_cls): {'prec': [34], 'expts': [(-6143, 6144)], 'clamp': 1, 'iter': None} ] - if '--medium' in sys.argv: + if args.time == 'medium': base['expts'].append(('rand', 'rand')) # 5 random precisions base['samples'] = 5 testspecs = [small] + ieee + [base] - if '--long' in sys.argv: + elif args.time == 'long': base['expts'].append(('rand', 'rand')) # 10 random precisions base['samples'] = 10 testspecs = [small] + ieee + [base] - elif '--all' in sys.argv: + elif args.time == 'all': base['expts'].append(('rand', 'rand')) # All precisions in [1, 100] base['samples'] = 100 @@ -1195,39 +1223,100 @@ def check_untested(funcdict, c_cls, p_cls): small['expts'] = [(-prec, prec)] testspecs = [small, rand_ieee, base] + check_untested(Functions, C.Decimal, P.Decimal) check_untested(ContextFunctions, C.Context, P.Context) - log("\n\nRandom seed: %d\n\n", randseed) + if args.multicore: + q = Queue() + elif args.single: + log("Random seed: %d", randseed) + else: + log("\n\nRandom seed: %d\n\n", randseed) + + + FOUND_METHOD = False + def do_single(method, f): + global FOUND_METHOD + if args.multicore: + q.put(method) + elif not args.single or args.single == method: + FOUND_METHOD = True + f() # Decimal methods: for method in Functions['unary'] + Functions['unary_ctx'] + \ Functions['unary_rnd_ctx']: - test_method(method, testspecs, test_unary) + do_single(method, lambda: test_method(method, testspecs, test_unary)) for method in Functions['binary'] + Functions['binary_ctx']: - test_method(method, testspecs, test_binary) + do_single(method, lambda: test_method(method, testspecs, test_binary)) for method in Functions['ternary'] + Functions['ternary_ctx']: - test_method(method, testspecs, test_ternary) + name = '__powmod__' if method == '__pow__' else method + do_single(name, lambda: test_method(method, testspecs, test_ternary)) - test_method('__format__', testspecs, test_format) - test_method('__round__', testspecs, test_round) - test_method('from_float', testspecs, test_from_float) - test_method('quantize', testspecs, test_quantize_api) + do_single('__format__', lambda: test_method('__format__', testspecs, test_format)) + do_single('__round__', lambda: test_method('__round__', testspecs, test_round)) + do_single('from_float', lambda: test_method('from_float', testspecs, test_from_float)) + do_single('quantize_api', lambda: test_method('quantize', testspecs, test_quantize_api)) # Context methods: for method in ContextFunctions['unary']: - test_method(method, testspecs, test_unary) + do_single(method, lambda: test_method(method, testspecs, test_unary)) for method in ContextFunctions['binary']: - test_method(method, testspecs, test_binary) + do_single(method, lambda: test_method(method, testspecs, test_binary)) for method in ContextFunctions['ternary']: - test_method(method, testspecs, test_ternary) + name = 'context.powmod' if method == 'context.power' else method + do_single(name, lambda: test_method(method, testspecs, test_ternary)) + + do_single('context.create_decimal_from_float', + lambda: test_method('context.create_decimal_from_float', + testspecs, test_from_float)) + + if args.multicore: + error = Event() + write_lock = Lock() - test_method('context.create_decimal_from_float', testspecs, test_from_float) + def write_output(out, returncode): + if returncode != 0: + error.set() + + with write_lock: + sys.stdout.buffer.write(out + 
b"\n") + sys.stdout.buffer.flush() + + def tfunc(): + while not error.is_set(): + try: + test = q.get(block=False, timeout=-1) + except Empty: + return + cmd = [sys.executable, "deccheck.py", "--%s" % args.time, "--single", test] + p = subprocess.Popen(cmd, stdout=PIPE, stderr=STDOUT) + out, _ = p.communicate() + write_output(out, p.returncode) - sys.exit(EXIT_STATUS) + N = os.cpu_count() + t = N * [None] + + for i in range(N): + t[i] = Thread(target=tfunc) + t[i].start() + + for i in range(N): + t[i].join() + + sys.exit(1 if error.is_set() else 0) + + elif args.single: + if not FOUND_METHOD: + log("\nerror: cannot find method \"%s\"" % args.single) + EXIT_STATUS = 1 + sys.exit(EXIT_STATUS) + else: + sys.exit(EXIT_STATUS) From webhook-mailer at python.org Mon Jun 8 14:04:52 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 08 Jun 2020 18:04:52 -0000 Subject: [Python-checkins] bpo-40854: PYTHONPLATLIBDIR env var added to 3.9 (GH-20735) Message-ID: https://github.com/python/cpython/commit/5edb83241f2ff899917e895092aca0216faf42d3 commit: 5edb83241f2ff899917e895092aca0216faf42d3 branch: master author: Victor Stinner committer: GitHub date: 2020-06-08T20:04:47+02:00 summary: bpo-40854: PYTHONPLATLIBDIR env var added to 3.9 (GH-20735) files: M Doc/c-api/init_config.rst M Doc/using/cmdline.rst diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index c51b157bbb33e..b7298ba825d3c 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -443,7 +443,7 @@ PyConfig by ``--with-platlibdir``, overrideable by the ``PYTHONPLATLIBDIR`` environment variable. - .. versionadded:: 3.10 + .. versionadded:: 3.9 .. c:member:: int buffered_stdio diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 3e0797279d6bf..f91ab020da5cf 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -543,7 +543,7 @@ conflict. If this is set to a non-empty string, it overrides the :data:`sys.platlibdir` value. - .. versionadded:: 3.10 + .. versionadded:: 3.9 .. envvar:: PYTHONSTARTUP From webhook-mailer at python.org Mon Jun 8 14:07:33 2020 From: webhook-mailer at python.org (Brett Cannon) Date: Mon, 08 Jun 2020 18:07:33 -0000 Subject: [Python-checkins] bpo-24914: mention Python supports multiple paradigms in the FAQ (#20658) Message-ID: https://github.com/python/cpython/commit/3ab3475c42c8ee5580f4ea1aeda73ebc8e5d5478 commit: 3ab3475c42c8ee5580f4ea1aeda73ebc8e5d5478 branch: master author: Brett Cannon committer: GitHub date: 2020-06-08T11:07:29-07:00 summary: bpo-24914: mention Python supports multiple paradigms in the FAQ (#20658) files: M Doc/faq/general.rst diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 3ef553e8acb43..70837341b1b33 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -17,12 +17,13 @@ What is Python? Python is an interpreted, interactive, object-oriented programming language. It incorporates modules, exceptions, dynamic typing, very high level dynamic data -types, and classes. Python combines remarkable power with very clear syntax. -It has interfaces to many system calls and libraries, as well as to various -window systems, and is extensible in C or C++. It is also usable as an -extension language for applications that need a programmable interface. -Finally, Python is portable: it runs on many Unix variants, on the Mac, and on -Windows 2000 and later. +types, and classes. It supports multiple programming paradigms beyond +object-oriented programming, such as procedural and functional programming. 
+Python combines remarkable power with very clear syntax. It has interfaces to +many system calls and libraries, as well as to various window systems, and is +extensible in C or C++. It is also usable as an extension language for +applications that need a programmable interface. Finally, Python is portable: +it runs on many Unix variants including Linux and macOS, and on Windows. To find out more, start with :ref:`tutorial-index`. The `Beginner's Guide to Python `_ links to other From webhook-mailer at python.org Mon Jun 8 14:53:34 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 08 Jun 2020 18:53:34 -0000 Subject: [Python-checkins] bpo-24914: mention Python supports multiple paradigms in the FAQ (GH-20658) (GH-20738) Message-ID: https://github.com/python/cpython/commit/14073c509058f8efeb5ea7f7693bf84f410d24b7 commit: 14073c509058f8efeb5ea7f7693bf84f410d24b7 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-08T11:53:25-07:00 summary: bpo-24914: mention Python supports multiple paradigms in the FAQ (GH-20658) (GH-20738) (cherry picked from commit 3ab3475c42c8ee5580f4ea1aeda73ebc8e5d5478) Co-authored-by: Brett Cannon Co-authored-by: Brett Cannon files: M Doc/faq/general.rst diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 3ef553e8acb43..70837341b1b33 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -17,12 +17,13 @@ What is Python? Python is an interpreted, interactive, object-oriented programming language. It incorporates modules, exceptions, dynamic typing, very high level dynamic data -types, and classes. Python combines remarkable power with very clear syntax. -It has interfaces to many system calls and libraries, as well as to various -window systems, and is extensible in C or C++. It is also usable as an -extension language for applications that need a programmable interface. -Finally, Python is portable: it runs on many Unix variants, on the Mac, and on -Windows 2000 and later. +types, and classes. It supports multiple programming paradigms beyond +object-oriented programming, such as procedural and functional programming. +Python combines remarkable power with very clear syntax. It has interfaces to +many system calls and libraries, as well as to various window systems, and is +extensible in C or C++. It is also usable as an extension language for +applications that need a programmable interface. Finally, Python is portable: +it runs on many Unix variants including Linux and macOS, and on Windows. To find out more, start with :ref:`tutorial-index`. 
The `Beginner's Guide to Python `_ links to other From webhook-mailer at python.org Mon Jun 8 15:38:48 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Mon, 08 Jun 2020 19:38:48 -0000 Subject: [Python-checkins] Minor improvement to the namedtuple implementation (GH-20741) Message-ID: https://github.com/python/cpython/commit/0a40849eb99a0357113bff10434ec6605e3ae96b commit: 0a40849eb99a0357113bff10434ec6605e3ae96b branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-08T12:38:41-07:00 summary: Minor improvement to the namedtuple implementation (GH-20741) * Cleaner way to build the arg list with a trailing comma when required * Fix appearance of __new__ in help() files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 1e3b54ccf9cc9..6a06cc6a64f16 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -399,7 +399,9 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non # Variables used in the methods and docstrings field_names = tuple(map(_sys.intern, field_names)) num_fields = len(field_names) - arg_list = repr(field_names).replace("'", "")[1:-1] + arg_list = ', '.join(field_names) + if num_fields == 1: + arg_list += ',' repr_fmt = '(' + ', '.join(f'{name}=%r' for name in field_names) + ')' tuple_new = tuple.__new__ _dict, _tuple, _len, _map, _zip = dict, tuple, len, map, zip @@ -410,6 +412,7 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non namespace = {'_tuple_new': tuple_new, '__builtins__': None, '__name__': f'namedtuple_{typename}'} __new__ = eval(s, namespace) + __new__.__name__ = '__new__' __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: __new__.__defaults__ = defaults From webhook-mailer at python.org Mon Jun 8 19:21:03 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:21:03 -0000 Subject: [Python-checkins] [3.8] Revert bpo-39576: Clarify the word size for the 32-bit build. (GH-20743) Message-ID: https://github.com/python/cpython/commit/706de4e5a4b21880c67f6b90e3a2147a258d6fc5 commit: 706de4e5a4b21880c67f6b90e3a2147a258d6fc5 branch: 3.8 author: Stefan Krah committer: GitHub date: 2020-06-09T01:20:58+02:00 summary: [3.8] Revert bpo-39576: Clarify the word size for the 32-bit build. (GH-20743) This reverts commit c6ecd9c14081a787959e13df33e250102a658154. files: M Doc/library/decimal.rst diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 3dda35fbd35db..f9421aba423ff 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2164,8 +2164,8 @@ RAM and expect 10 simultaneous operands using a maximum of 500MB each:: >>> import sys >>> - >>> # Maximum number of digits for a single operand using 500MB in 8-byte words - >>> # with 19 digits per word (4-byte and 9 digits for the 32-bit build): + >>> # Maximum number of digits for a single operand using 500MB in 8 byte words + >>> # with 19 (9 for the 32-bit version) digits per word: >>> maxdigits = 19 * ((500 * 1024**2) // 8) >>> >>> # Check that this works: From webhook-mailer at python.org Mon Jun 8 19:22:08 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:22:08 -0000 Subject: [Python-checkins] [3.7] Revert bpo-39576: Clarify the word size for the 32-bit build. 
(GH-20744) Message-ID: https://github.com/python/cpython/commit/c0b79450bc9e93105799528151c48d25af8240a3 commit: c0b79450bc9e93105799528151c48d25af8240a3 branch: 3.7 author: Stefan Krah committer: GitHub date: 2020-06-09T01:22:03+02:00 summary: [3.7] Revert bpo-39576: Clarify the word size for the 32-bit build. (GH-20744) This reverts commit 24c570bbb82a7cb70576c253a73390accfa7ed78. files: M Doc/library/decimal.rst diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 3dda35fbd35db..f9421aba423ff 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2164,8 +2164,8 @@ RAM and expect 10 simultaneous operands using a maximum of 500MB each:: >>> import sys >>> - >>> # Maximum number of digits for a single operand using 500MB in 8-byte words - >>> # with 19 digits per word (4-byte and 9 digits for the 32-bit build): + >>> # Maximum number of digits for a single operand using 500MB in 8 byte words + >>> # with 19 (9 for the 32-bit version) digits per word: >>> maxdigits = 19 * ((500 * 1024**2) // 8) >>> >>> # Check that this works: From webhook-mailer at python.org Mon Jun 8 19:33:16 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:33:16 -0000 Subject: [Python-checkins] [3.8] Revert bpo-39576: docs: set context for decimal arbitrary precision arithmetic (GH-20745) Message-ID: https://github.com/python/cpython/commit/32c1fb07e6f2ded90e5dd24d4b46b7aa7a795d2e commit: 32c1fb07e6f2ded90e5dd24d4b46b7aa7a795d2e branch: 3.8 author: Stefan Krah committer: GitHub date: 2020-06-09T01:33:08+02:00 summary: [3.8] Revert bpo-39576: docs: set context for decimal arbitrary precision arithmetic (GH-20745) This reverts commit d6965ff026f35498e554bc964ef2be8f4d80eb7f. files: M Doc/library/decimal.rst diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index f9421aba423ff..8169bd358c701 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2130,67 +2130,17 @@ Q. Is the CPython implementation fast for large numbers? A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of the decimal module integrate the high speed `libmpdec `_ library for -arbitrary precision correctly-rounded decimal floating point arithmetic [#]_. +arbitrary precision correctly-rounded decimal floating point arithmetic. ``libmpdec`` uses `Karatsuba multiplication `_ for medium-sized numbers and the `Number Theoretic Transform `_ -for very large numbers. +for very large numbers. However, to realize this performance gain, the +context needs to be set for unrounded calculations. -The context must be adapted for exact arbitrary precision arithmetic. :attr:`Emin` -and :attr:`Emax` should always be set to the maximum values, :attr:`clamp` -should always be 0 (the default). Setting :attr:`prec` requires some care. + >>> c = getcontext() + >>> c.prec = MAX_PREC + >>> c.Emax = MAX_EMAX + >>> c.Emin = MIN_EMIN -The easiest approach for trying out bignum arithmetic is to use the maximum -value for :attr:`prec` as well [#]_:: - - >>> setcontext(Context(prec=MAX_PREC, Emax=MAX_EMAX, Emin=MIN_EMIN)) - >>> x = Decimal(2) ** 256 - >>> x / 128 - Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312') - - -For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and -the available memory will be insufficient:: - - >>> Decimal(1) / 3 - Traceback (most recent call last): - File "", line 1, in - MemoryError - -On systems with overallocation (e.g. 
Linux), a more sophisticated approach is to -adjust :attr:`prec` to the amount of available RAM. Suppose that you have 8GB of -RAM and expect 10 simultaneous operands using a maximum of 500MB each:: - - >>> import sys - >>> - >>> # Maximum number of digits for a single operand using 500MB in 8 byte words - >>> # with 19 (9 for the 32-bit version) digits per word: - >>> maxdigits = 19 * ((500 * 1024**2) // 8) - >>> - >>> # Check that this works: - >>> c = Context(prec=maxdigits, Emax=MAX_EMAX, Emin=MIN_EMIN) - >>> c.traps[Inexact] = True - >>> setcontext(c) - >>> - >>> # Fill the available precision with nines: - >>> x = Decimal(0).logical_invert() * 9 - >>> sys.getsizeof(x) - 524288112 - >>> x + 2 - Traceback (most recent call last): - File "", line 1, in - decimal.Inexact: [] - -In general (and especially on systems without overallocation), it is recommended -to estimate even tighter bounds and set the :attr:`Inexact` trap if all calculations -are expected to be exact. - - -.. [#] - .. versionadded:: 3.3 - -.. [#] - .. versionchanged:: 3.9 - This approach now works for all exact results except for non-integer powers. - Also backported to 3.7 and 3.8. +.. versionadded:: 3.3 \ No newline at end of file From webhook-mailer at python.org Mon Jun 8 19:34:07 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:34:07 -0000 Subject: [Python-checkins] [3.7] Revert bpo-39576: docs: set context for decimal arbitrary precision arithmetic (GH-20746) Message-ID: https://github.com/python/cpython/commit/9bd891920a5186b7d02281ea9966225efa0ceba1 commit: 9bd891920a5186b7d02281ea9966225efa0ceba1 branch: 3.7 author: Stefan Krah committer: GitHub date: 2020-06-09T01:34:03+02:00 summary: [3.7] Revert bpo-39576: docs: set context for decimal arbitrary precision arithmetic (GH-20746) This reverts commit 00e45877e33d32bb61aa13a2033e3bba370bda4d. files: M Doc/library/decimal.rst diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index f9421aba423ff..8169bd358c701 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2130,67 +2130,17 @@ Q. Is the CPython implementation fast for large numbers? A. Yes. In the CPython and PyPy3 implementations, the C/CFFI versions of the decimal module integrate the high speed `libmpdec `_ library for -arbitrary precision correctly-rounded decimal floating point arithmetic [#]_. +arbitrary precision correctly-rounded decimal floating point arithmetic. ``libmpdec`` uses `Karatsuba multiplication `_ for medium-sized numbers and the `Number Theoretic Transform `_ -for very large numbers. +for very large numbers. However, to realize this performance gain, the +context needs to be set for unrounded calculations. -The context must be adapted for exact arbitrary precision arithmetic. :attr:`Emin` -and :attr:`Emax` should always be set to the maximum values, :attr:`clamp` -should always be 0 (the default). Setting :attr:`prec` requires some care. 
+ >>> c = getcontext() + >>> c.prec = MAX_PREC + >>> c.Emax = MAX_EMAX + >>> c.Emin = MIN_EMIN -The easiest approach for trying out bignum arithmetic is to use the maximum -value for :attr:`prec` as well [#]_:: - - >>> setcontext(Context(prec=MAX_PREC, Emax=MAX_EMAX, Emin=MIN_EMIN)) - >>> x = Decimal(2) ** 256 - >>> x / 128 - Decimal('904625697166532776746648320380374280103671755200316906558262375061821325312') - - -For inexact results, :attr:`MAX_PREC` is far too large on 64-bit platforms and -the available memory will be insufficient:: - - >>> Decimal(1) / 3 - Traceback (most recent call last): - File "", line 1, in - MemoryError - -On systems with overallocation (e.g. Linux), a more sophisticated approach is to -adjust :attr:`prec` to the amount of available RAM. Suppose that you have 8GB of -RAM and expect 10 simultaneous operands using a maximum of 500MB each:: - - >>> import sys - >>> - >>> # Maximum number of digits for a single operand using 500MB in 8 byte words - >>> # with 19 (9 for the 32-bit version) digits per word: - >>> maxdigits = 19 * ((500 * 1024**2) // 8) - >>> - >>> # Check that this works: - >>> c = Context(prec=maxdigits, Emax=MAX_EMAX, Emin=MIN_EMIN) - >>> c.traps[Inexact] = True - >>> setcontext(c) - >>> - >>> # Fill the available precision with nines: - >>> x = Decimal(0).logical_invert() * 9 - >>> sys.getsizeof(x) - 524288112 - >>> x + 2 - Traceback (most recent call last): - File "", line 1, in - decimal.Inexact: [] - -In general (and especially on systems without overallocation), it is recommended -to estimate even tighter bounds and set the :attr:`Inexact` trap if all calculations -are expected to be exact. - - -.. [#] - .. versionadded:: 3.3 - -.. [#] - .. versionchanged:: 3.9 - This approach now works for all exact results except for non-integer powers. - Also backported to 3.7 and 3.8. +.. versionadded:: 3.3 \ No newline at end of file From webhook-mailer at python.org Mon Jun 8 19:55:55 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:55:55 -0000 Subject: [Python-checkins] [3.7] Revert bpo-39576: Prevent memory error for overly optimistic precisions (GH-20748) Message-ID: https://github.com/python/cpython/commit/22faf6ad3bcc0ae478a9a3e2d8e35888d88d6ce8 commit: 22faf6ad3bcc0ae478a9a3e2d8e35888d88d6ce8 branch: 3.7 author: Stefan Krah committer: GitHub date: 2020-06-09T01:55:47+02:00 summary: [3.7] Revert bpo-39576: Prevent memory error for overly optimistic precisions (GH-20748) This reverts commit c6f95543b4832c3f0170179da39bcf99b40a7aa8. files: M Lib/test/test_decimal.py M Modules/_decimal/libmpdec/mpdecimal.c M Modules/_decimal/tests/deccheck.py diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 0e9cd3095c85e..1f37b5372a3e7 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -5476,41 +5476,6 @@ def __abs__(self): self.assertEqual(Decimal.from_float(cls(101.1)), Decimal.from_float(101.1)) - def test_maxcontext_exact_arith(self): - - # Make sure that exact operations do not raise MemoryError due - # to huge intermediate values when the context precision is very - # large. - - # The following functions fill the available precision and are - # therefore not suitable for large precisions (by design of the - # specification). 
- MaxContextSkip = ['logical_invert', 'next_minus', 'next_plus', - 'logical_and', 'logical_or', 'logical_xor', - 'next_toward', 'rotate', 'shift'] - - Decimal = C.Decimal - Context = C.Context - localcontext = C.localcontext - - # Here only some functions that are likely candidates for triggering a - # MemoryError are tested. deccheck.py has an exhaustive test. - maxcontext = Context(prec=C.MAX_PREC, Emin=C.MIN_EMIN, Emax=C.MAX_EMAX) - with localcontext(maxcontext): - self.assertEqual(Decimal(0).exp(), 1) - self.assertEqual(Decimal(1).ln(), 0) - self.assertEqual(Decimal(1).log10(), 0) - self.assertEqual(Decimal(10**2).log10(), 2) - self.assertEqual(Decimal(10**223).log10(), 223) - self.assertEqual(Decimal(10**19).logb(), 19) - self.assertEqual(Decimal(4).sqrt(), 2) - self.assertEqual(Decimal("40E9").sqrt(), Decimal('2.0E+5')) - self.assertEqual(divmod(Decimal(10), 3), (3, 1)) - self.assertEqual(Decimal(10) // 3, 3) - self.assertEqual(Decimal(4) / 2, 2) - self.assertEqual(Decimal(400) ** -1, Decimal('0.0025')) - - @requires_docstrings @unittest.skipUnless(C, "test requires C version") class SignatureTest(unittest.TestCase): diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c index 0986edb576a10..bfa8bb343e60c 100644 --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -3781,43 +3781,6 @@ mpd_qdiv(mpd_t *q, const mpd_t *a, const mpd_t *b, const mpd_context_t *ctx, uint32_t *status) { _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, status); - - if (*status & MPD_Malloc_error) { - /* Inexact quotients (the usual case) fill the entire context precision, - * which can lead to malloc() failures for very high precisions. Retry - * the operation with a lower precision in case the result is exact. - * - * We need an upper bound for the number of digits of a_coeff / b_coeff - * when the result is exact. If a_coeff' * 1 / b_coeff' is in lowest - * terms, then maxdigits(a_coeff') + maxdigits(1 / b_coeff') is a suitable - * bound. - * - * 1 / b_coeff' is exact iff b_coeff' exclusively has prime factors 2 or 5. - * The largest amount of digits is generated if b_coeff' is a power of 2 or - * a power of 5 and is less than or equal to log5(b_coeff') <= log2(b_coeff'). - * - * We arrive at a total upper bound: - * - * maxdigits(a_coeff') + maxdigits(1 / b_coeff') <= - * a->digits + log2(b_coeff) = - * a->digits + log10(b_coeff) / log10(2) <= - * a->digits + b->digits * 4; - */ - uint32_t workstatus = 0; - mpd_context_t workctx = *ctx; - workctx.prec = a->digits + b->digits * 4; - if (workctx.prec >= ctx->prec) { - return; /* No point in retrying, keep the original error. */ - } - - _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &workstatus); - if (workstatus == 0) { /* The result is exact, unrounded, normal etc. */ - *status = 0; - return; - } - - mpd_seterror(q, *status, status); - } } /* Internal function. 
*/ @@ -7739,9 +7702,9 @@ mpd_qinvroot(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, /* END LIBMPDEC_ONLY */ /* Algorithm from decimal.py */ -static void -_mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) +void +mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, + uint32_t *status) { mpd_context_t maxcontext; MPD_NEW_STATIC(c,0,0,0,0); @@ -7873,40 +7836,6 @@ _mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, goto out; } -void -mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) -{ - _mpd_qsqrt(result, a, ctx, status); - - if (*status & (MPD_Malloc_error|MPD_Division_impossible)) { - /* The above conditions can occur at very high context precisions - * if intermediate values get too large. Retry the operation with - * a lower context precision in case the result is exact. - * - * If the result is exact, an upper bound for the number of digits - * is the number of digits in the input. - * - * NOTE: sqrt(40e9) = 2.0e+5 /\ digits(40e9) = digits(2.0e+5) = 2 - */ - uint32_t workstatus = 0; - mpd_context_t workctx = *ctx; - workctx.prec = a->digits; - - if (workctx.prec >= ctx->prec) { - return; /* No point in repeating this, keep the original error. */ - } - - _mpd_qsqrt(result, a, &workctx, &workstatus); - if (workstatus == 0) { - *status = 0; - return; - } - - mpd_seterror(result, *status, status); - } -} - /******************************************************************************/ /* Base conversions */ diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py index 5cd5db5711426..f907531e1ffa5 100644 --- a/Modules/_decimal/tests/deccheck.py +++ b/Modules/_decimal/tests/deccheck.py @@ -125,12 +125,6 @@ 'special': ('context.__reduce_ex__', 'context.create_decimal_from_float') } -# Functions that set no context flags but whose result can differ depending -# on prec, Emin and Emax. -MaxContextSkip = ['is_normal', 'is_subnormal', 'logical_invert', 'next_minus', - 'next_plus', 'number_class', 'logical_and', 'logical_or', - 'logical_xor', 'next_toward', 'rotate', 'shift'] - # Functions that require a restricted exponent range for reasonable runtimes. UnaryRestricted = [ '__ceil__', '__floor__', '__int__', '__trunc__', @@ -350,20 +344,6 @@ def __init__(self, funcname, operands): self.pex = RestrictedList() # Python exceptions for P.Decimal self.presults = RestrictedList() # P.Decimal results - # If the above results are exact, unrounded and not clamped, repeat - # the operation with a maxcontext to ensure that huge intermediate - # values do not cause a MemoryError. 
- self.with_maxcontext = False - self.maxcontext = context.c.copy() - self.maxcontext.prec = C.MAX_PREC - self.maxcontext.Emax = C.MAX_EMAX - self.maxcontext.Emin = C.MIN_EMIN - self.maxcontext.clear_flags() - - self.maxop = RestrictedList() # converted C.Decimal operands - self.maxex = RestrictedList() # Python exceptions for C.Decimal - self.maxresults = RestrictedList() # C.Decimal results - # ====================================================================== # SkipHandler: skip known discrepancies @@ -565,17 +545,13 @@ def function_as_string(t): if t.contextfunc: cargs = t.cop pargs = t.pop - maxargs = t.maxop cfunc = "c_func: %s(" % t.funcname pfunc = "p_func: %s(" % t.funcname - maxfunc = "max_func: %s(" % t.funcname else: cself, cargs = t.cop[0], t.cop[1:] pself, pargs = t.pop[0], t.pop[1:] - maxself, maxargs = t.maxop[0], t.maxop[1:] cfunc = "c_func: %s.%s(" % (repr(cself), t.funcname) pfunc = "p_func: %s.%s(" % (repr(pself), t.funcname) - maxfunc = "max_func: %s.%s(" % (repr(maxself), t.funcname) err = cfunc for arg in cargs: @@ -589,14 +565,6 @@ def function_as_string(t): err = err.rstrip(", ") err += ")" - if t.with_maxcontext: - err += "\n" - err += maxfunc - for arg in maxargs: - err += "%s, " % repr(arg) - err = err.rstrip(", ") - err += ")" - return err def raise_error(t): @@ -609,24 +577,9 @@ def raise_error(t): err = "Error in %s:\n\n" % t.funcname err += "input operands: %s\n\n" % (t.op,) err += function_as_string(t) - - err += "\n\nc_result: %s\np_result: %s\n" % (t.cresults, t.presults) - if t.with_maxcontext: - err += "max_result: %s\n\n" % (t.maxresults) - else: - err += "\n" - - err += "c_exceptions: %s\np_exceptions: %s\n" % (t.cex, t.pex) - if t.with_maxcontext: - err += "max_exceptions: %s\n\n" % t.maxex - else: - err += "\n" - - err += "%s\n" % str(t.context) - if t.with_maxcontext: - err += "%s\n" % str(t.maxcontext) - else: - err += "\n" + err += "\n\nc_result: %s\np_result: %s\n\n" % (t.cresults, t.presults) + err += "c_exceptions: %s\np_exceptions: %s\n\n" % (t.cex, t.pex) + err += "%s\n\n" % str(t.context) raise VerifyError(err) @@ -650,13 +603,6 @@ def raise_error(t): # are printed to stdout. # ====================================================================== -def all_nan(a): - if isinstance(a, C.Decimal): - return a.is_nan() - elif isinstance(a, tuple): - return all(all_nan(v) for v in a) - return False - def convert(t, convstr=True): """ t is the testset. At this stage the testset contains a tuple of operands t.op of various types. 
For decimal methods the first @@ -671,12 +617,10 @@ def convert(t, convstr=True): for i, op in enumerate(t.op): context.clear_status() - t.maxcontext.clear_flags() if op in RoundModes: t.cop.append(op) t.pop.append(op) - t.maxop.append(op) elif not t.contextfunc and i == 0 or \ convstr and isinstance(op, str): @@ -694,25 +638,11 @@ def convert(t, convstr=True): p = None pex = e.__class__ - try: - C.setcontext(t.maxcontext) - maxop = C.Decimal(op) - maxex = None - except (TypeError, ValueError, OverflowError) as e: - maxop = None - maxex = e.__class__ - finally: - C.setcontext(context.c) - t.cop.append(c) t.cex.append(cex) - t.pop.append(p) t.pex.append(pex) - t.maxop.append(maxop) - t.maxex.append(maxex) - if cex is pex: if str(c) != str(p) or not context.assert_eq_status(): raise_error(t) @@ -722,21 +652,14 @@ def convert(t, convstr=True): else: raise_error(t) - # The exceptions in the maxcontext operation can legitimately - # differ, only test that maxex implies cex: - if maxex is not None and cex is not maxex: - raise_error(t) - elif isinstance(op, Context): t.context = op t.cop.append(op.c) t.pop.append(op.p) - t.maxop.append(t.maxcontext) else: t.cop.append(op) t.pop.append(op) - t.maxop.append(op) return 1 @@ -750,7 +673,6 @@ def callfuncs(t): t.rc and t.rp are the results of the operation. """ context.clear_status() - t.maxcontext.clear_flags() try: if t.contextfunc: @@ -778,35 +700,6 @@ def callfuncs(t): t.rp = None t.pex.append(e.__class__) - # If the above results are exact, unrounded, normal etc., repeat the - # operation with a maxcontext to ensure that huge intermediate values - # do not cause a MemoryError. - if (t.funcname not in MaxContextSkip and - not context.c.flags[C.InvalidOperation] and - not context.c.flags[C.Inexact] and - not context.c.flags[C.Rounded] and - not context.c.flags[C.Subnormal] and - not context.c.flags[C.Clamped] and - not context.clamp and # results are padded to context.prec if context.clamp==1. - not any(isinstance(v, C.Context) for v in t.cop)): # another context is used. - t.with_maxcontext = True - try: - if t.contextfunc: - maxargs = t.maxop - t.rmax = getattr(t.maxcontext, t.funcname)(*maxargs) - else: - maxself = t.maxop[0] - maxargs = t.maxop[1:] - try: - C.setcontext(t.maxcontext) - t.rmax = getattr(maxself, t.funcname)(*maxargs) - finally: - C.setcontext(context.c) - t.maxex.append(None) - except (TypeError, ValueError, OverflowError, MemoryError) as e: - t.rmax = None - t.maxex.append(e.__class__) - def verify(t, stat): """ t is the testset. At this stage the testset contains the following tuples: @@ -821,9 +714,6 @@ def verify(t, stat): """ t.cresults.append(str(t.rc)) t.presults.append(str(t.rp)) - if t.with_maxcontext: - t.maxresults.append(str(t.rmax)) - if isinstance(t.rc, C.Decimal) and isinstance(t.rp, P.Decimal): # General case: both results are Decimals. 
t.cresults.append(t.rc.to_eng_string()) @@ -835,12 +725,6 @@ def verify(t, stat): t.presults.append(str(t.rp.imag)) t.presults.append(str(t.rp.real)) - if t.with_maxcontext and isinstance(t.rmax, C.Decimal): - t.maxresults.append(t.rmax.to_eng_string()) - t.maxresults.append(t.rmax.as_tuple()) - t.maxresults.append(str(t.rmax.imag)) - t.maxresults.append(str(t.rmax.real)) - nc = t.rc.number_class().lstrip('+-s') stat[nc] += 1 else: @@ -848,9 +732,6 @@ def verify(t, stat): if not isinstance(t.rc, tuple) and not isinstance(t.rp, tuple): if t.rc != t.rp: raise_error(t) - if t.with_maxcontext and not isinstance(t.rmax, tuple): - if t.rmax != t.rc: - raise_error(t) stat[type(t.rc).__name__] += 1 # The return value lists must be equal. @@ -863,20 +744,6 @@ def verify(t, stat): if not t.context.assert_eq_status(): raise_error(t) - if t.with_maxcontext: - # NaN payloads etc. depend on precision and clamp. - if all_nan(t.rc) and all_nan(t.rmax): - return - # The return value lists must be equal. - if t.maxresults != t.cresults: - raise_error(t) - # The Python exception lists (TypeError, etc.) must be equal. - if t.maxex != t.cex: - raise_error(t) - # The context flags must be equal. - if t.maxcontext.flags != t.context.c.flags: - raise_error(t) - # ====================================================================== # Main test loops From webhook-mailer at python.org Mon Jun 8 19:57:15 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 08 Jun 2020 23:57:15 -0000 Subject: [Python-checkins] [3.8] Revert bpo-39576: Prevent memory error for overly optimistic precisions (GH-20747) Message-ID: https://github.com/python/cpython/commit/0f5a28f834bdac2da8a04597dc0fc5b71e50da9d commit: 0f5a28f834bdac2da8a04597dc0fc5b71e50da9d branch: 3.8 author: Stefan Krah committer: GitHub date: 2020-06-09T01:57:11+02:00 summary: [3.8] Revert bpo-39576: Prevent memory error for overly optimistic precisions (GH-20747) This reverts commit b6271025c640c228505dc9f194362a0c2ab81c61. files: M Lib/test/test_decimal.py M Modules/_decimal/libmpdec/mpdecimal.c M Modules/_decimal/tests/deccheck.py diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index 0e9cd3095c85e..1f37b5372a3e7 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -5476,41 +5476,6 @@ def __abs__(self): self.assertEqual(Decimal.from_float(cls(101.1)), Decimal.from_float(101.1)) - def test_maxcontext_exact_arith(self): - - # Make sure that exact operations do not raise MemoryError due - # to huge intermediate values when the context precision is very - # large. - - # The following functions fill the available precision and are - # therefore not suitable for large precisions (by design of the - # specification). - MaxContextSkip = ['logical_invert', 'next_minus', 'next_plus', - 'logical_and', 'logical_or', 'logical_xor', - 'next_toward', 'rotate', 'shift'] - - Decimal = C.Decimal - Context = C.Context - localcontext = C.localcontext - - # Here only some functions that are likely candidates for triggering a - # MemoryError are tested. deccheck.py has an exhaustive test. 
- maxcontext = Context(prec=C.MAX_PREC, Emin=C.MIN_EMIN, Emax=C.MAX_EMAX) - with localcontext(maxcontext): - self.assertEqual(Decimal(0).exp(), 1) - self.assertEqual(Decimal(1).ln(), 0) - self.assertEqual(Decimal(1).log10(), 0) - self.assertEqual(Decimal(10**2).log10(), 2) - self.assertEqual(Decimal(10**223).log10(), 223) - self.assertEqual(Decimal(10**19).logb(), 19) - self.assertEqual(Decimal(4).sqrt(), 2) - self.assertEqual(Decimal("40E9").sqrt(), Decimal('2.0E+5')) - self.assertEqual(divmod(Decimal(10), 3), (3, 1)) - self.assertEqual(Decimal(10) // 3, 3) - self.assertEqual(Decimal(4) / 2, 2) - self.assertEqual(Decimal(400) ** -1, Decimal('0.0025')) - - @requires_docstrings @unittest.skipUnless(C, "test requires C version") class SignatureTest(unittest.TestCase): diff --git a/Modules/_decimal/libmpdec/mpdecimal.c b/Modules/_decimal/libmpdec/mpdecimal.c index 0986edb576a10..bfa8bb343e60c 100644 --- a/Modules/_decimal/libmpdec/mpdecimal.c +++ b/Modules/_decimal/libmpdec/mpdecimal.c @@ -3781,43 +3781,6 @@ mpd_qdiv(mpd_t *q, const mpd_t *a, const mpd_t *b, const mpd_context_t *ctx, uint32_t *status) { _mpd_qdiv(SET_IDEAL_EXP, q, a, b, ctx, status); - - if (*status & MPD_Malloc_error) { - /* Inexact quotients (the usual case) fill the entire context precision, - * which can lead to malloc() failures for very high precisions. Retry - * the operation with a lower precision in case the result is exact. - * - * We need an upper bound for the number of digits of a_coeff / b_coeff - * when the result is exact. If a_coeff' * 1 / b_coeff' is in lowest - * terms, then maxdigits(a_coeff') + maxdigits(1 / b_coeff') is a suitable - * bound. - * - * 1 / b_coeff' is exact iff b_coeff' exclusively has prime factors 2 or 5. - * The largest amount of digits is generated if b_coeff' is a power of 2 or - * a power of 5 and is less than or equal to log5(b_coeff') <= log2(b_coeff'). - * - * We arrive at a total upper bound: - * - * maxdigits(a_coeff') + maxdigits(1 / b_coeff') <= - * a->digits + log2(b_coeff) = - * a->digits + log10(b_coeff) / log10(2) <= - * a->digits + b->digits * 4; - */ - uint32_t workstatus = 0; - mpd_context_t workctx = *ctx; - workctx.prec = a->digits + b->digits * 4; - if (workctx.prec >= ctx->prec) { - return; /* No point in retrying, keep the original error. */ - } - - _mpd_qdiv(SET_IDEAL_EXP, q, a, b, &workctx, &workstatus); - if (workstatus == 0) { /* The result is exact, unrounded, normal etc. */ - *status = 0; - return; - } - - mpd_seterror(q, *status, status); - } } /* Internal function. */ @@ -7739,9 +7702,9 @@ mpd_qinvroot(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, /* END LIBMPDEC_ONLY */ /* Algorithm from decimal.py */ -static void -_mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) +void +mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, + uint32_t *status) { mpd_context_t maxcontext; MPD_NEW_STATIC(c,0,0,0,0); @@ -7873,40 +7836,6 @@ _mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, goto out; } -void -mpd_qsqrt(mpd_t *result, const mpd_t *a, const mpd_context_t *ctx, - uint32_t *status) -{ - _mpd_qsqrt(result, a, ctx, status); - - if (*status & (MPD_Malloc_error|MPD_Division_impossible)) { - /* The above conditions can occur at very high context precisions - * if intermediate values get too large. Retry the operation with - * a lower context precision in case the result is exact. - * - * If the result is exact, an upper bound for the number of digits - * is the number of digits in the input. 
- * - * NOTE: sqrt(40e9) = 2.0e+5 /\ digits(40e9) = digits(2.0e+5) = 2 - */ - uint32_t workstatus = 0; - mpd_context_t workctx = *ctx; - workctx.prec = a->digits; - - if (workctx.prec >= ctx->prec) { - return; /* No point in repeating this, keep the original error. */ - } - - _mpd_qsqrt(result, a, &workctx, &workstatus); - if (workstatus == 0) { - *status = 0; - return; - } - - mpd_seterror(result, *status, status); - } -} - /******************************************************************************/ /* Base conversions */ diff --git a/Modules/_decimal/tests/deccheck.py b/Modules/_decimal/tests/deccheck.py index 5cd5db5711426..f907531e1ffa5 100644 --- a/Modules/_decimal/tests/deccheck.py +++ b/Modules/_decimal/tests/deccheck.py @@ -125,12 +125,6 @@ 'special': ('context.__reduce_ex__', 'context.create_decimal_from_float') } -# Functions that set no context flags but whose result can differ depending -# on prec, Emin and Emax. -MaxContextSkip = ['is_normal', 'is_subnormal', 'logical_invert', 'next_minus', - 'next_plus', 'number_class', 'logical_and', 'logical_or', - 'logical_xor', 'next_toward', 'rotate', 'shift'] - # Functions that require a restricted exponent range for reasonable runtimes. UnaryRestricted = [ '__ceil__', '__floor__', '__int__', '__trunc__', @@ -350,20 +344,6 @@ def __init__(self, funcname, operands): self.pex = RestrictedList() # Python exceptions for P.Decimal self.presults = RestrictedList() # P.Decimal results - # If the above results are exact, unrounded and not clamped, repeat - # the operation with a maxcontext to ensure that huge intermediate - # values do not cause a MemoryError. - self.with_maxcontext = False - self.maxcontext = context.c.copy() - self.maxcontext.prec = C.MAX_PREC - self.maxcontext.Emax = C.MAX_EMAX - self.maxcontext.Emin = C.MIN_EMIN - self.maxcontext.clear_flags() - - self.maxop = RestrictedList() # converted C.Decimal operands - self.maxex = RestrictedList() # Python exceptions for C.Decimal - self.maxresults = RestrictedList() # C.Decimal results - # ====================================================================== # SkipHandler: skip known discrepancies @@ -565,17 +545,13 @@ def function_as_string(t): if t.contextfunc: cargs = t.cop pargs = t.pop - maxargs = t.maxop cfunc = "c_func: %s(" % t.funcname pfunc = "p_func: %s(" % t.funcname - maxfunc = "max_func: %s(" % t.funcname else: cself, cargs = t.cop[0], t.cop[1:] pself, pargs = t.pop[0], t.pop[1:] - maxself, maxargs = t.maxop[0], t.maxop[1:] cfunc = "c_func: %s.%s(" % (repr(cself), t.funcname) pfunc = "p_func: %s.%s(" % (repr(pself), t.funcname) - maxfunc = "max_func: %s.%s(" % (repr(maxself), t.funcname) err = cfunc for arg in cargs: @@ -589,14 +565,6 @@ def function_as_string(t): err = err.rstrip(", ") err += ")" - if t.with_maxcontext: - err += "\n" - err += maxfunc - for arg in maxargs: - err += "%s, " % repr(arg) - err = err.rstrip(", ") - err += ")" - return err def raise_error(t): @@ -609,24 +577,9 @@ def raise_error(t): err = "Error in %s:\n\n" % t.funcname err += "input operands: %s\n\n" % (t.op,) err += function_as_string(t) - - err += "\n\nc_result: %s\np_result: %s\n" % (t.cresults, t.presults) - if t.with_maxcontext: - err += "max_result: %s\n\n" % (t.maxresults) - else: - err += "\n" - - err += "c_exceptions: %s\np_exceptions: %s\n" % (t.cex, t.pex) - if t.with_maxcontext: - err += "max_exceptions: %s\n\n" % t.maxex - else: - err += "\n" - - err += "%s\n" % str(t.context) - if t.with_maxcontext: - err += "%s\n" % str(t.maxcontext) - else: - err += "\n" + err += 
"\n\nc_result: %s\np_result: %s\n\n" % (t.cresults, t.presults) + err += "c_exceptions: %s\np_exceptions: %s\n\n" % (t.cex, t.pex) + err += "%s\n\n" % str(t.context) raise VerifyError(err) @@ -650,13 +603,6 @@ def raise_error(t): # are printed to stdout. # ====================================================================== -def all_nan(a): - if isinstance(a, C.Decimal): - return a.is_nan() - elif isinstance(a, tuple): - return all(all_nan(v) for v in a) - return False - def convert(t, convstr=True): """ t is the testset. At this stage the testset contains a tuple of operands t.op of various types. For decimal methods the first @@ -671,12 +617,10 @@ def convert(t, convstr=True): for i, op in enumerate(t.op): context.clear_status() - t.maxcontext.clear_flags() if op in RoundModes: t.cop.append(op) t.pop.append(op) - t.maxop.append(op) elif not t.contextfunc and i == 0 or \ convstr and isinstance(op, str): @@ -694,25 +638,11 @@ def convert(t, convstr=True): p = None pex = e.__class__ - try: - C.setcontext(t.maxcontext) - maxop = C.Decimal(op) - maxex = None - except (TypeError, ValueError, OverflowError) as e: - maxop = None - maxex = e.__class__ - finally: - C.setcontext(context.c) - t.cop.append(c) t.cex.append(cex) - t.pop.append(p) t.pex.append(pex) - t.maxop.append(maxop) - t.maxex.append(maxex) - if cex is pex: if str(c) != str(p) or not context.assert_eq_status(): raise_error(t) @@ -722,21 +652,14 @@ def convert(t, convstr=True): else: raise_error(t) - # The exceptions in the maxcontext operation can legitimately - # differ, only test that maxex implies cex: - if maxex is not None and cex is not maxex: - raise_error(t) - elif isinstance(op, Context): t.context = op t.cop.append(op.c) t.pop.append(op.p) - t.maxop.append(t.maxcontext) else: t.cop.append(op) t.pop.append(op) - t.maxop.append(op) return 1 @@ -750,7 +673,6 @@ def callfuncs(t): t.rc and t.rp are the results of the operation. """ context.clear_status() - t.maxcontext.clear_flags() try: if t.contextfunc: @@ -778,35 +700,6 @@ def callfuncs(t): t.rp = None t.pex.append(e.__class__) - # If the above results are exact, unrounded, normal etc., repeat the - # operation with a maxcontext to ensure that huge intermediate values - # do not cause a MemoryError. - if (t.funcname not in MaxContextSkip and - not context.c.flags[C.InvalidOperation] and - not context.c.flags[C.Inexact] and - not context.c.flags[C.Rounded] and - not context.c.flags[C.Subnormal] and - not context.c.flags[C.Clamped] and - not context.clamp and # results are padded to context.prec if context.clamp==1. - not any(isinstance(v, C.Context) for v in t.cop)): # another context is used. - t.with_maxcontext = True - try: - if t.contextfunc: - maxargs = t.maxop - t.rmax = getattr(t.maxcontext, t.funcname)(*maxargs) - else: - maxself = t.maxop[0] - maxargs = t.maxop[1:] - try: - C.setcontext(t.maxcontext) - t.rmax = getattr(maxself, t.funcname)(*maxargs) - finally: - C.setcontext(context.c) - t.maxex.append(None) - except (TypeError, ValueError, OverflowError, MemoryError) as e: - t.rmax = None - t.maxex.append(e.__class__) - def verify(t, stat): """ t is the testset. At this stage the testset contains the following tuples: @@ -821,9 +714,6 @@ def verify(t, stat): """ t.cresults.append(str(t.rc)) t.presults.append(str(t.rp)) - if t.with_maxcontext: - t.maxresults.append(str(t.rmax)) - if isinstance(t.rc, C.Decimal) and isinstance(t.rp, P.Decimal): # General case: both results are Decimals. 
t.cresults.append(t.rc.to_eng_string()) @@ -835,12 +725,6 @@ def verify(t, stat): t.presults.append(str(t.rp.imag)) t.presults.append(str(t.rp.real)) - if t.with_maxcontext and isinstance(t.rmax, C.Decimal): - t.maxresults.append(t.rmax.to_eng_string()) - t.maxresults.append(t.rmax.as_tuple()) - t.maxresults.append(str(t.rmax.imag)) - t.maxresults.append(str(t.rmax.real)) - nc = t.rc.number_class().lstrip('+-s') stat[nc] += 1 else: @@ -848,9 +732,6 @@ def verify(t, stat): if not isinstance(t.rc, tuple) and not isinstance(t.rp, tuple): if t.rc != t.rp: raise_error(t) - if t.with_maxcontext and not isinstance(t.rmax, tuple): - if t.rmax != t.rc: - raise_error(t) stat[type(t.rc).__name__] += 1 # The return value lists must be equal. @@ -863,20 +744,6 @@ def verify(t, stat): if not t.context.assert_eq_status(): raise_error(t) - if t.with_maxcontext: - # NaN payloads etc. depend on precision and clamp. - if all_nan(t.rc) and all_nan(t.rmax): - return - # The return value lists must be equal. - if t.maxresults != t.cresults: - raise_error(t) - # The Python exception lists (TypeError, etc.) must be equal. - if t.maxex != t.cex: - raise_error(t) - # The context flags must be equal. - if t.maxcontext.flags != t.context.c.flags: - raise_error(t) - # ====================================================================== # Main test loops From webhook-mailer at python.org Tue Jun 9 04:27:53 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Tue, 09 Jun 2020 08:27:53 -0000 Subject: [Python-checkins] Remove reference to 3.7 and 3.8 backports. (GH-20754) Message-ID: https://github.com/python/cpython/commit/323188360d61875bd68688ef41711bade298af50 commit: 323188360d61875bd68688ef41711bade298af50 branch: master author: Stefan Krah committer: GitHub date: 2020-06-09T10:27:45+02:00 summary: Remove reference to 3.7 and 3.8 backports. (GH-20754) files: M Doc/library/decimal.rst diff --git a/Doc/library/decimal.rst b/Doc/library/decimal.rst index 69a20fca17898..38ad04177c5e8 100644 --- a/Doc/library/decimal.rst +++ b/Doc/library/decimal.rst @@ -2193,4 +2193,3 @@ are expected to be exact. .. [#] .. versionchanged:: 3.9 This approach now works for all exact results except for non-integer powers. - Also backported to 3.7 and 3.8. 
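The exactness checks above (Inexact, Rounded, Clamped and friends) are what decided whether the removed maxcontext retry path was taken at all. A minimal sketch, using only the public decimal API (operands chosen purely for illustration), of how an exact result differs from an inexact one in the context flags:

    from decimal import Decimal, getcontext, Inexact, Rounded

    ctx = getcontext()

    ctx.clear_flags()
    Decimal(4).sqrt()       # exact result: 2, needs no rounding
    print(ctx.flags[Inexact], ctx.flags[Rounded])   # False False

    ctx.clear_flags()
    Decimal(2).sqrt()       # irrational: fills the full context precision
    print(ctx.flags[Inexact], ctx.flags[Rounded])   # True True

Only results of the first kind were eligible for the retry-at-lower-precision logic removed in the diffs above.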
From webhook-mailer at python.org Tue Jun 9 08:39:10 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 09 Jun 2020 12:39:10 -0000 Subject: [Python-checkins] Add quotes to code to be a string Message-ID: https://github.com/python/cpython/commit/4b378acb97a575892c0e372a6bb0c17da1ccdf3e commit: 4b378acb97a575892c0e372a6bb0c17da1ccdf3e branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-09T05:39:01-07:00 summary: Add quotes to code to be a string files: A Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst M Lib/codeop.py M Lib/test/test_codeop.py diff --git a/Lib/codeop.py b/Lib/codeop.py index 3c37f35eb0250..3c2bb6083561e 100644 --- a/Lib/codeop.py +++ b/Lib/codeop.py @@ -57,6 +57,7 @@ """ import __future__ +import warnings _features = [getattr(__future__, fname) for fname in __future__.all_feature_names] @@ -83,15 +84,18 @@ def _maybe_compile(compiler, source, filename, symbol): except SyntaxError as err: pass - try: - code1 = compiler(source + "\n", filename, symbol) - except SyntaxError as e: - err1 = e - - try: - code2 = compiler(source + "\n\n", filename, symbol) - except SyntaxError as e: - err2 = e + # Suppress warnings after the first compile to avoid duplication. + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + try: + code1 = compiler(source + "\n", filename, symbol) + except SyntaxError as e: + err1 = e + + try: + code2 = compiler(source + "\n\n", filename, symbol) + except SyntaxError as e: + err2 = e try: if code: diff --git a/Lib/test/test_codeop.py b/Lib/test/test_codeop.py index 4d52d15fa0fb3..8e278b9b2311e 100644 --- a/Lib/test/test_codeop.py +++ b/Lib/test/test_codeop.py @@ -294,6 +294,11 @@ def test_filename(self): self.assertNotEqual(compile_command("a = 1\n", "abc").co_filename, compile("a = 1\n", "def", 'single').co_filename) + def test_warning(self): + # Test that the warning is only returned once. + with support.check_warnings((".*invalid", DeprecationWarning)) as w: + compile_command("'\e'") + self.assertEqual(len(w.warnings), 1) if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst new file mode 100644 index 0000000000000..532b809b77eed --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst @@ -0,0 +1,2 @@ +Stop codeop._maybe_compile, used by code.InteractiveInterpreter (and IDLE). +from from emitting each warning three times. From webhook-mailer at python.org Tue Jun 9 09:32:52 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 09 Jun 2020 13:32:52 -0000 Subject: [Python-checkins] bpo-40684: Fix make install for platlibdir=lib64 (GH-20736) Message-ID: https://github.com/python/cpython/commit/51ae31e5b93b986e57a7e18e25f981a6ffcdefb7 commit: 51ae31e5b93b986e57a7e18e25f981a6ffcdefb7 branch: master author: Victor Stinner committer: GitHub date: 2020-06-09T15:32:43+02:00 summary: bpo-40684: Fix make install for platlibdir=lib64 (GH-20736) "make install" now uses the PLATLIBDIR variable for the destination lib-dynload/ directory when ./configure --with-platlibdir is used. Update --with-platlibdir comment in configure. 
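As a rough illustration (assuming a 3.9+ interpreter built with ./configure --with-platlibdir=lib64; the paths are examples, not output produced by this change), the configured value surfaces at runtime through sys.platlibdir and the sysconfig paths:

    import sys
    import sysconfig

    # "lib64" on a --with-platlibdir=lib64 build, "lib" by default
    print(sys.platlibdir)

    # Platform-specific stdlib directory that contains lib-dynload/,
    # e.g. /usr/lib64/python3.10 on such a build
    print(sysconfig.get_path("platstdlib"))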
files: A Misc/NEWS.d/next/Build/2020-06-08-19-57-05.bpo-40684.WIY2-i.rst M Makefile.pre.in M configure M configure.ac diff --git a/Makefile.pre.in b/Makefile.pre.in index 9cb7a23eea582..7c16d2905fbf4 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -148,7 +148,7 @@ SCRIPTDIR= $(prefix)/$(PLATLIBDIR) ABIFLAGS= @ABIFLAGS@ # Detailed destination directories -BINLIBDEST= $(LIBDIR)/python$(VERSION) +BINLIBDEST= @BINLIBDEST@ LIBDEST= $(SCRIPTDIR)/python$(VERSION) INCLUDEPY= $(INCLUDEDIR)/python$(LDVERSION) CONFINCLUDEPY= $(CONFINCLUDEDIR)/python$(LDVERSION) diff --git a/Misc/NEWS.d/next/Build/2020-06-08-19-57-05.bpo-40684.WIY2-i.rst b/Misc/NEWS.d/next/Build/2020-06-08-19-57-05.bpo-40684.WIY2-i.rst new file mode 100644 index 0000000000000..0495e5e413622 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-06-08-19-57-05.bpo-40684.WIY2-i.rst @@ -0,0 +1,2 @@ +``make install`` now uses the ``PLATLIBDIR`` variable for the destination +``lib-dynload/`` directory when ``./configure --with-platlibdir`` is used. diff --git a/configure b/configure index 1124412dce475..139c2bf7de132 100755 --- a/configure +++ b/configure @@ -632,6 +632,7 @@ THREADHEADERS LIBPL PY_ENABLE_SHARED PLATLIBDIR +BINLIBDEST LIBPYTHON EXT_SUFFIX ALT_SOABI @@ -15334,7 +15335,11 @@ else fi -# Check for --with-libdir-name + +BINLIBDEST='$(LIBDIR)/python$(VERSION)' + + +# Check for --with-platlibdir # /usr/$LIDIRNAME/python$VERSION PLATLIBDIR="lib" @@ -15353,6 +15358,7 @@ then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } PLATLIBDIR="$withval" + BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)' else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } diff --git a/configure.ac b/configure.ac index 84d1f00983f89..30856c8b6883d 100644 --- a/configure.ac +++ b/configure.ac @@ -4770,7 +4770,11 @@ else fi -# Check for --with-libdir-name +AC_SUBST(BINLIBDEST) +BINLIBDEST='$(LIBDIR)/python$(VERSION)' + + +# Check for --with-platlibdir # /usr/$LIDIRNAME/python$VERSION AC_SUBST(PLATLIBDIR) PLATLIBDIR="lib" @@ -4787,6 +4791,7 @@ if test -n "$withval" -a "$withval" != yes -a "$withval" != no then AC_MSG_RESULT(yes) PLATLIBDIR="$withval" + BINLIBDEST='${exec_prefix}/${PLATLIBDIR}/python$(VERSION)' else AC_MSG_RESULT(no) fi], From webhook-mailer at python.org Tue Jun 9 11:33:51 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Tue, 09 Jun 2020 15:33:51 -0000 Subject: [Python-checkins] Remove usesless function from csv module (GH-20762) Message-ID: https://github.com/python/cpython/commit/0383be4666905f9e24ca791afda845a7686b3fe3 commit: 0383be4666905f9e24ca791afda845a7686b3fe3 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-10T00:33:43+09:00 summary: Remove usesless function from csv module (GH-20762) files: M Modules/_csv.c diff --git a/Modules/_csv.c b/Modules/_csv.c index 7e44419c0876b..2d4247740eb29 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -146,13 +146,6 @@ get_dialect_from_registry(PyObject * name_obj) return dialect_obj; } -static PyObject * -get_string(PyObject *str) -{ - Py_XINCREF(str); - return str; -} - static PyObject * get_nullchar_as_None(Py_UCS4 c) { @@ -166,7 +159,8 @@ get_nullchar_as_None(Py_UCS4 c) static PyObject * Dialect_get_lineterminator(DialectObj *self, void *Py_UNUSED(ignored)) { - return get_string(self->lineterminator); + Py_XINCREF(self->lineterminator); + return self->lineterminator; } static PyObject * From webhook-mailer at python.org Tue Jun 9 21:26:20 2020 From: webhook-mailer at python.org (Miss Islington 
(bot)) Date: Wed, 10 Jun 2020 01:26:20 -0000 Subject: [Python-checkins] [3.8] bpo-34003: Re-add versionchanged entry in csv docs (GH-20657) (GH-20771) Message-ID: https://github.com/python/cpython/commit/663836e1179ea79eac12e55670af7e89a531a060 commit: 663836e1179ea79eac12e55670af7e89a531a060 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-09T18:26:16-07:00 summary: [3.8] bpo-34003: Re-add versionchanged entry in csv docs (GH-20657) (GH-20771) Follow-up to GH-8014 (cherry picked from commit 7aed0524d4129766a6032326949ef7f91f6f6dfc) Co-authored-by: ?ric Araujo Automerge-Triggered-By: @merwok files: M Doc/library/csv.rst diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index 61d39828e0194..7a72c26d5bade 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -167,6 +167,9 @@ The :mod:`csv` module defines the following classes: All other optional or keyword arguments are passed to the underlying :class:`reader` instance. + .. versionchanged:: 3.6 + Returned rows are now of type :class:`OrderedDict`. + .. versionchanged:: 3.8 Returned rows are now of type :class:`dict`. From webhook-mailer at python.org Tue Jun 9 23:53:31 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Wed, 10 Jun 2020 03:53:31 -0000 Subject: [Python-checkins] bpo-32604: Recommit "bpo-32604: PEP 554 for use in test suite (GH-19985)" (GH-20611) Message-ID: https://github.com/python/cpython/commit/bae872f1fe9b3a0d3e3b8800a2ac8d6b440d6e4d commit: bae872f1fe9b3a0d3e3b8800a2ac8d6b440d6e4d branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-06-10T00:53:23-03:00 summary: bpo-32604: Recommit "bpo-32604: PEP 554 for use in test suite (GH-19985)" (GH-20611) * PEP 554 for use in test suite * ?? Added by blurb_it. * Fix space * Add doc to doc tree * Move to modules doc tree * Fix suspicious doc errors * Fix test__all * Docs docs docs * Support isolated and fix wait * Fix white space * Remove undefined from __all__ * Fix recv and add exceptions * Remove unused exceptions, fix pep 8 formatting errors and fix _NOT_SET in recv_nowait() * Update Lib/test/support/interpreters.py Co-authored-by: Pablo Galindo * Remove documentation (module is for internal use) Co-authored-by: nanjekyejoannah Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Lib/test/support/interpreters.py A Lib/test/test_interpreters.py A Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst diff --git a/Lib/test/support/interpreters.py b/Lib/test/support/interpreters.py new file mode 100644 index 0000000000000..09508e1bbeca0 --- /dev/null +++ b/Lib/test/support/interpreters.py @@ -0,0 +1,183 @@ +"""Subinterpreters High Level Module.""" + +import _xxsubinterpreters as _interpreters + +# aliases: +from _xxsubinterpreters import ( + ChannelError, ChannelNotFoundError, ChannelEmptyError, + is_shareable, +) + + +__all__ = [ + 'Interpreter', 'get_current', 'get_main', 'create', 'list_all', + 'SendChannel', 'RecvChannel', + 'create_channel', 'list_all_channels', 'is_shareable', + 'ChannelError', 'ChannelNotFoundError', + 'ChannelEmptyError', + ] + + +def create(*, isolated=True): + """ + Initialize a new (idle) Python interpreter. + """ + id = _interpreters.create(isolated=isolated) + return Interpreter(id, isolated=isolated) + + +def list_all(): + """ + Get all existing interpreters. 
+ """ + return [Interpreter(id) for id in + _interpreters.list_all()] + + +def get_current(): + """ + Get the currently running interpreter. + """ + id = _interpreters.get_current() + return Interpreter(id) + + +def get_main(): + """ + Get the main interpreter. + """ + id = _interpreters.get_main() + return Interpreter(id) + + +class Interpreter: + """ + The Interpreter object represents + a single interpreter. + """ + + def __init__(self, id, *, isolated=None): + self._id = id + self._isolated = isolated + + @property + def id(self): + return self._id + + @property + def isolated(self): + if self._isolated is None: + self._isolated = _interpreters.is_isolated(self._id) + return self._isolated + + def is_running(self): + """ + Return whether or not the identified + interpreter is running. + """ + return _interpreters.is_running(self._id) + + def close(self): + """ + Finalize and destroy the interpreter. + + Attempting to destroy the current + interpreter results in a RuntimeError. + """ + return _interpreters.destroy(self._id) + + def run(self, src_str, /, *, channels=None): + """ + Run the given source code in the interpreter. + This blocks the current Python thread until done. + """ + _interpreters.run_string(self._id, src_str) + + +def create_channel(): + """ + Create a new channel for passing data between + interpreters. + """ + + cid = _interpreters.channel_create() + return (RecvChannel(cid), SendChannel(cid)) + + +def list_all_channels(): + """ + Get all open channels. + """ + return [(RecvChannel(cid), SendChannel(cid)) + for cid in _interpreters.channel_list_all()] + + +_NOT_SET = object() + + +class RecvChannel: + """ + The RecvChannel object represents + a receiving channel. + """ + + def __init__(self, id): + self._id = id + + def recv(self, *, _delay=10 / 1000): # 10 milliseconds + """ + Get the next object from the channel, + and wait if none have been sent. + Associate the interpreter with the channel. + """ + import time + sentinel = object() + obj = _interpreters.channel_recv(self._id, sentinel) + while obj is sentinel: + time.sleep(_delay) + obj = _interpreters.channel_recv(self._id, sentinel) + return obj + + def recv_nowait(self, default=_NOT_SET): + """ + Like recv(), but return the default + instead of waiting. + + This function is blocked by a missing low-level + implementation of channel_recv_wait(). + """ + if default is _NOT_SET: + return _interpreters.channel_recv(self._id) + else: + return _interpreters.channel_recv(self._id, default) + + +class SendChannel: + """ + The SendChannel object represents + a sending channel. + """ + + def __init__(self, id): + self._id = id + + def send(self, obj): + """ + Send the object (i.e. its data) to the receiving + end of the channel and wait. Associate the interpreter + with the channel. + """ + import time + _interpreters.channel_send(self._id, obj) + time.sleep(2) + + def send_nowait(self, obj): + """ + Like send(), but return False if not received. + + This function is blocked by a missing low-level + implementation of channel_send_wait(). 
+ """ + + _interpreters.channel_send(self._id, obj) + return False diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py new file mode 100644 index 0000000000000..3451a4c8759d8 --- /dev/null +++ b/Lib/test/test_interpreters.py @@ -0,0 +1,535 @@ +import contextlib +import os +import threading +from textwrap import dedent +import unittest +import time + +import _xxsubinterpreters as _interpreters +from test.support import interpreters + + +def _captured_script(script): + r, w = os.pipe() + indented = script.replace('\n', '\n ') + wrapped = dedent(f""" + import contextlib + with open({w}, 'w') as spipe: + with contextlib.redirect_stdout(spipe): + {indented} + """) + return wrapped, open(r) + + +def clean_up_interpreters(): + for interp in interpreters.list_all(): + if interp.id == 0: # main + continue + try: + interp.close() + except RuntimeError: + pass # already destroyed + + +def _run_output(interp, request, shared=None): + script, rpipe = _captured_script(request) + with rpipe: + interp.run(script) + return rpipe.read() + + + at contextlib.contextmanager +def _running(interp): + r, w = os.pipe() + def run(): + interp.run(dedent(f""" + # wait for "signal" + with open({r}) as rpipe: + rpipe.read() + """)) + + t = threading.Thread(target=run) + t.start() + + yield + + with open(w, 'w') as spipe: + spipe.write('done') + t.join() + + +class TestBase(unittest.TestCase): + + def tearDown(self): + clean_up_interpreters() + + +class CreateTests(TestBase): + + def test_in_main(self): + interp = interpreters.create() + lst = interpreters.list_all() + self.assertEqual(interp.id, lst[1].id) + + def test_in_thread(self): + lock = threading.Lock() + id = None + interp = interpreters.create() + lst = interpreters.list_all() + def f(): + nonlocal id + id = interp.id + lock.acquire() + lock.release() + + t = threading.Thread(target=f) + with lock: + t.start() + t.join() + self.assertEqual(interp.id, lst[1].id) + + def test_in_subinterpreter(self): + main, = interpreters.list_all() + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + interp = interpreters.create() + print(interp) + """)) + interp2 = out.strip() + + self.assertEqual(len(set(interpreters.list_all())), len({main, interp, interp2})) + + def test_after_destroy_all(self): + before = set(interpreters.list_all()) + # Create 3 subinterpreters. + interp_lst = [] + for _ in range(3): + interps = interpreters.create() + interp_lst.append(interps) + # Now destroy them. + for interp in interp_lst: + interp.close() + # Finally, create another. + interp = interpreters.create() + self.assertEqual(len(set(interpreters.list_all())), len(before | {interp})) + + def test_after_destroy_some(self): + before = set(interpreters.list_all()) + # Create 3 subinterpreters. + interp1 = interpreters.create() + interp2 = interpreters.create() + interp3 = interpreters.create() + # Now destroy 2 of them. + interp1.close() + interp2.close() + # Finally, create another. 
+ interp = interpreters.create() + self.assertEqual(len(set(interpreters.list_all())), len(before | {interp3, interp})) + + +class GetCurrentTests(TestBase): + + def test_main(self): + main_interp_id = _interpreters.get_main() + cur_interp_id = interpreters.get_current().id + self.assertEqual(cur_interp_id, main_interp_id) + + def test_subinterpreter(self): + main = _interpreters.get_main() + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + cur = interpreters.get_current() + print(cur) + """)) + cur = out.strip() + self.assertNotEqual(cur, main) + + +class ListAllTests(TestBase): + + def test_initial(self): + interps = interpreters.list_all() + self.assertEqual(1, len(interps)) + + def test_after_creating(self): + main = interpreters.get_current() + first = interpreters.create() + second = interpreters.create() + + ids = [] + for interp in interpreters.list_all(): + ids.append(interp.id) + + self.assertEqual(ids, [main.id, first.id, second.id]) + + def test_after_destroying(self): + main = interpreters.get_current() + first = interpreters.create() + second = interpreters.create() + first.close() + + ids = [] + for interp in interpreters.list_all(): + ids.append(interp.id) + + self.assertEqual(ids, [main.id, second.id]) + + +class TestInterpreterId(TestBase): + + def test_in_main(self): + main = interpreters.get_current() + self.assertEqual(0, main.id) + + def test_with_custom_num(self): + interp = interpreters.Interpreter(1) + self.assertEqual(1, interp.id) + + def test_for_readonly_property(self): + interp = interpreters.Interpreter(1) + with self.assertRaises(AttributeError): + interp.id = 2 + + +class TestInterpreterIsRunning(TestBase): + + def test_main(self): + main = interpreters.get_current() + self.assertTrue(main.is_running()) + + def test_subinterpreter(self): + interp = interpreters.create() + self.assertFalse(interp.is_running()) + + with _running(interp): + self.assertTrue(interp.is_running()) + self.assertFalse(interp.is_running()) + + def test_from_subinterpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(f""" + import _xxsubinterpreters as _interpreters + if _interpreters.is_running({interp.id}): + print(True) + else: + print(False) + """)) + self.assertEqual(out.strip(), 'True') + + def test_already_destroyed(self): + interp = interpreters.create() + interp.close() + with self.assertRaises(RuntimeError): + interp.is_running() + + +class TestInterpreterDestroy(TestBase): + + def test_basic(self): + interp1 = interpreters.create() + interp2 = interpreters.create() + interp3 = interpreters.create() + self.assertEqual(4, len(interpreters.list_all())) + interp2.close() + self.assertEqual(3, len(interpreters.list_all())) + + def test_all(self): + before = set(interpreters.list_all()) + interps = set() + for _ in range(3): + interp = interpreters.create() + interps.add(interp) + self.assertEqual(len(set(interpreters.list_all())), len(before | interps)) + for interp in interps: + interp.close() + self.assertEqual(len(set(interpreters.list_all())), len(before)) + + def test_main(self): + main, = interpreters.list_all() + with self.assertRaises(RuntimeError): + main.close() + + def f(): + with self.assertRaises(RuntimeError): + main.close() + + t = threading.Thread(target=f) + t.start() + t.join() + + def test_already_destroyed(self): + interp = interpreters.create() + interp.close() + with self.assertRaises(RuntimeError): + interp.close() + + def test_from_current(self): + main, = 
interpreters.list_all() + interp = interpreters.create() + script = dedent(f""" + from test.support import interpreters + try: + main = interpreters.get_current() + main.close() + except RuntimeError: + pass + """) + + interp.run(script) + self.assertEqual(len(set(interpreters.list_all())), len({main, interp})) + + def test_from_sibling(self): + main, = interpreters.list_all() + interp1 = interpreters.create() + script = dedent(f""" + from test.support import interpreters + interp2 = interpreters.create() + interp2.close() + """) + interp1.run(script) + + self.assertEqual(len(set(interpreters.list_all())), len({main, interp1})) + + def test_from_other_thread(self): + interp = interpreters.create() + def f(): + interp.close() + + t = threading.Thread(target=f) + t.start() + t.join() + + def test_still_running(self): + main, = interpreters.list_all() + interp = interpreters.create() + with _running(interp): + with self.assertRaises(RuntimeError): + interp.close() + self.assertTrue(interp.is_running()) + + +class TestInterpreterRun(TestBase): + + SCRIPT = dedent(""" + with open('{}', 'w') as out: + out.write('{}') + """) + FILENAME = 'spam' + + def setUp(self): + super().setUp() + self.interp = interpreters.create() + self._fs = None + + def tearDown(self): + if self._fs is not None: + self._fs.close() + super().tearDown() + + @property + def fs(self): + if self._fs is None: + self._fs = FSFixture(self) + return self._fs + + def test_success(self): + script, file = _captured_script('print("it worked!", end="")') + with file: + self.interp.run(script) + out = file.read() + + self.assertEqual(out, 'it worked!') + + def test_in_thread(self): + script, file = _captured_script('print("it worked!", end="")') + with file: + def f(): + self.interp.run(script) + + t = threading.Thread(target=f) + t.start() + t.join() + out = file.read() + + self.assertEqual(out, 'it worked!') + + @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") + def test_fork(self): + import tempfile + with tempfile.NamedTemporaryFile('w+') as file: + file.write('') + file.flush() + + expected = 'spam spam spam spam spam' + script = dedent(f""" + import os + try: + os.fork() + except RuntimeError: + with open('{file.name}', 'w') as out: + out.write('{expected}') + """) + self.interp.run(script) + + file.seek(0) + content = file.read() + self.assertEqual(content, expected) + + def test_already_running(self): + with _running(self.interp): + with self.assertRaises(RuntimeError): + self.interp.run('print("spam")') + + def test_bad_script(self): + with self.assertRaises(TypeError): + self.interp.run(10) + + def test_bytes_for_script(self): + with self.assertRaises(TypeError): + self.interp.run(b'print("spam")') + + +class TestIsShareable(TestBase): + + def test_default_shareables(self): + shareables = [ + # singletons + None, + # builtin objects + b'spam', + 'spam', + 10, + -10, + ] + for obj in shareables: + with self.subTest(obj): + self.assertTrue( + interpreters.is_shareable(obj)) + + def test_not_shareable(self): + class Cheese: + def __init__(self, name): + self.name = name + def __str__(self): + return self.name + + class SubBytes(bytes): + """A subclass of a shareable type.""" + + not_shareables = [ + # singletons + True, + False, + NotImplemented, + ..., + # builtin types and objects + type, + object, + object(), + Exception(), + 100.0, + # user-defined types and objects + Cheese, + Cheese('Wensleydale'), + SubBytes(b'spam'), + ] + for obj in not_shareables: + with self.subTest(repr(obj)): + self.assertFalse( + 
interpreters.is_shareable(obj)) + + +class TestChannel(TestBase): + + def test_create_cid(self): + r, s = interpreters.create_channel() + self.assertIsInstance(r, interpreters.RecvChannel) + self.assertIsInstance(s, interpreters.SendChannel) + + def test_sequential_ids(self): + before = interpreters.list_all_channels() + channels1 = interpreters.create_channel() + channels2 = interpreters.create_channel() + channels3 = interpreters.create_channel() + after = interpreters.list_all_channels() + + self.assertEqual(len(set(after) - set(before)), + len({channels1, channels2, channels3})) + + +class TestSendRecv(TestBase): + + def test_send_recv_main(self): + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv() + + self.assertEqual(obj, orig) + self.assertIsNot(obj, orig) + + def test_send_recv_same_interpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv() + assert obj is not orig + assert obj == orig + """)) + + def test_send_recv_different_threads(self): + r, s = interpreters.create_channel() + + def f(): + while True: + try: + obj = r.recv() + break + except interpreters.ChannelEmptyError: + time.sleep(0.1) + s.send(obj) + t = threading.Thread(target=f) + t.start() + + s.send(b'spam') + t.join() + obj = r.recv() + + self.assertEqual(obj, b'spam') + + def test_send_recv_nowait_main(self): + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv_nowait() + + self.assertEqual(obj, orig) + self.assertIsNot(obj, orig) + + def test_send_recv_nowait_same_interpreter(self): + interp = interpreters.create() + out = _run_output(interp, dedent(""" + from test.support import interpreters + r, s = interpreters.create_channel() + orig = b'spam' + s.send(orig) + obj = r.recv_nowait() + assert obj is not orig + assert obj == orig + """)) + + r, s = interpreters.create_channel() + + def f(): + while True: + try: + obj = r.recv_nowait() + break + except _interpreters.ChannelEmptyError: + time.sleep(0.1) + s.send(obj) diff --git a/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst b/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst new file mode 100644 index 0000000000000..1129cd7649b96 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-07-22-00-12.bpo-39881.E1xsNv.rst @@ -0,0 +1,2 @@ +PEP 554 for use in the test suite. 
+(Patch By Joannah Nanjekye) \ No newline at end of file From webhook-mailer at python.org Wed Jun 10 01:57:09 2020 From: webhook-mailer at python.org (Dennis Sweeney) Date: Wed, 10 Jun 2020 05:57:09 -0000 Subject: [Python-checkins] bpo-40889: Optimize dict.items() ^ dict.items() (GH-20718) Message-ID: https://github.com/python/cpython/commit/07d81128124f2b574808e33267c38b104b42ae2a commit: 07d81128124f2b574808e33267c38b104b42ae2a branch: master author: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> committer: GitHub date: 2020-06-10T14:56:56+09:00 summary: bpo-40889: Optimize dict.items() ^ dict.items() (GH-20718) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-08-22-46-33.bpo-40889.vIBl-W.rst M Lib/test/test_dict.py M Objects/dictobject.c diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 6b8596fff6a9f..5c08810f879b1 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -697,6 +697,16 @@ def test_dictview_set_operations_on_items(self): self.assertEqual(k1 ^ k2, {(3,3)}) self.assertEqual(k1 ^ k3, {(1,1), (2,2), (4,4)}) + def test_items_symmetric_difference(self): + rr = random.randrange + for _ in range(100): + left = {x:rr(3) for x in range(20) if rr(2)} + right = {x:rr(3) for x in range(20) if rr(2)} + with self.subTest(left=left, right=right): + expected = set(left.items()) ^ set(right.items()) + actual = left.items() ^ right.items() + self.assertEqual(actual, expected) + def test_dictview_mixed_set_operations(self): # Just a few for .keys() self.assertTrue({1:1}.keys() == {1}) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-08-22-46-33.bpo-40889.vIBl-W.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-08-22-46-33.bpo-40889.vIBl-W.rst new file mode 100644 index 0000000000000..0ab1a261e3e6e --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-08-22-46-33.bpo-40889.vIBl-W.rst @@ -0,0 +1 @@ +Improved the performance of symmetric difference operations on dictionary item views. Patch by Dennis Sweeney. 
\ No newline at end of file diff --git a/Objects/dictobject.c b/Objects/dictobject.c index c4d5da51f3193..1bb8cfdab2b68 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -4409,9 +4409,99 @@ dictviews_or(PyObject* self, PyObject *other) return result; } +static PyObject * +dictitems_xor(PyObject *self, PyObject *other) +{ + assert(PyDictItems_Check(self)); + assert(PyDictItems_Check(other)); + PyObject *d1 = (PyObject *)((_PyDictViewObject *)self)->dv_dict; + PyObject *d2 = (PyObject *)((_PyDictViewObject *)other)->dv_dict; + + PyObject *temp_dict = PyDict_Copy(d1); + if (temp_dict == NULL) { + return NULL; + } + PyObject *result_set = PySet_New(NULL); + if (result_set == NULL) { + Py_CLEAR(temp_dict); + return NULL; + } + + PyObject *key = NULL, *val1 = NULL, *val2 = NULL; + Py_ssize_t pos = 0; + Py_hash_t hash; + + while (_PyDict_Next(d2, &pos, &key, &val2, &hash)) { + Py_INCREF(key); + Py_INCREF(val2); + val1 = _PyDict_GetItem_KnownHash(temp_dict, key, hash); + + int to_delete; + if (val1 == NULL) { + if (PyErr_Occurred()) { + goto error; + } + to_delete = 0; + } + else { + Py_INCREF(val1); + to_delete = PyObject_RichCompareBool(val1, val2, Py_EQ); + if (to_delete < 0) { + goto error; + } + } + + if (to_delete) { + if (_PyDict_DelItem_KnownHash(temp_dict, key, hash) < 0) { + goto error; + } + } + else { + PyObject *pair = PyTuple_Pack(2, key, val2); + if (pair == NULL) { + goto error; + } + if (PySet_Add(result_set, pair) < 0) { + Py_DECREF(pair); + goto error; + } + Py_DECREF(pair); + } + Py_DECREF(key); + Py_XDECREF(val1); + Py_DECREF(val2); + } + key = val1 = val2 = NULL; + + _Py_IDENTIFIER(items); + PyObject *remaining_pairs = _PyObject_CallMethodIdNoArgs(temp_dict, + &PyId_items); + if (remaining_pairs == NULL) { + goto error; + } + if (_PySet_Update(result_set, remaining_pairs) < 0) { + Py_DECREF(remaining_pairs); + goto error; + } + Py_DECREF(temp_dict); + Py_DECREF(remaining_pairs); + return result_set; + +error: + Py_XDECREF(temp_dict); + Py_XDECREF(result_set); + Py_XDECREF(key); + Py_XDECREF(val1); + Py_XDECREF(val2); + return NULL; +} + static PyObject* dictviews_xor(PyObject* self, PyObject *other) { + if (PyDictItems_Check(self) && PyDictItems_Check(other)) { + return dictitems_xor(self, other); + } PyObject *result = dictviews_to_set(self); if (result == NULL) { return NULL; From webhook-mailer at python.org Wed Jun 10 08:29:07 2020 From: webhook-mailer at python.org (Hai Shi) Date: Wed, 10 Jun 2020 12:29:07 -0000 Subject: [Python-checkins] bpo-40275: Add os_helper submodule in test.support (GH-20765) Message-ID: https://github.com/python/cpython/commit/0d00b2a5d74390da7bbeff7dfa73abf2eb46124a commit: 0d00b2a5d74390da7bbeff7dfa73abf2eb46124a branch: master author: Hai Shi committer: GitHub date: 2020-06-10T14:29:02+02:00 summary: bpo-40275: Add os_helper submodule in test.support (GH-20765) files: A Lib/test/support/os_helper.py M Doc/library/test.rst M Lib/test/support/__init__.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 7580fb5e9b174..11d748466cba2 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -247,41 +247,6 @@ The :mod:`test.support` module defines the following constants: Path for shell if not on Windows; otherwise ``None``. -.. data:: FS_NONASCII - - A non-ASCII character encodable by :func:`os.fsencode`. - - -.. data:: TESTFN - - Set to a name that is safe to use as the name of a temporary file. Any - temporary file that is created should be closed and unlinked (removed). - - -.. 
data:: TESTFN_UNICODE - - Set to a non-ASCII name for a temporary file. - - -.. data:: TESTFN_UNENCODABLE - - Set to a filename (str type) that should not be able to be encoded by file - system encoding in strict mode. It may be ``None`` if it's not possible to - generate such a filename. - - -.. data:: TESTFN_UNDECODABLE - - Set to a filename (bytes type) that should not be able to be decoded by - file system encoding in strict mode. It may be ``None`` if it's not - possible to generate such a filename. - - -.. data:: TESTFN_NONASCII - - Set to a filename containing the :data:`FS_NONASCII` character. - - .. data:: LOOPBACK_TIMEOUT Timeout in seconds for tests using a network server listening on the network @@ -343,11 +308,6 @@ The :mod:`test.support` module defines the following constants: :data:`SHORT_TIMEOUT`. -.. data:: SAVEDCWD - - Set to :func:`os.getcwd`. - - .. data:: PGO Set when tests can be skipped when they are not useful for PGO. @@ -449,25 +409,6 @@ The :mod:`test.support` module defines the following functions: Delete *name* from ``sys.modules``. -.. function:: unlink(filename) - - Call :func:`os.unlink` on *filename*. On Windows platforms, this is - wrapped with a wait loop that checks for the existence fo the file. - - -.. function:: rmdir(filename) - - Call :func:`os.rmdir` on *filename*. On Windows platforms, this is - wrapped with a wait loop that checks for the existence of the file. - - -.. function:: rmtree(path) - - Call :func:`shutil.rmtree` on *path* or call :func:`os.lstat` and - :func:`os.rmdir` to remove a path and its contents. On Windows platforms, - this is wrapped with a wait loop that checks for the existence of the files. - - .. function:: make_legacy_pyc(source) Move a :pep:`3147`/:pep:`488` pyc file to its legacy pyc location and return the file @@ -521,16 +462,6 @@ The :mod:`test.support` module defines the following functions: rather than looking directly in the path directories. -.. function:: create_empty_file(filename) - - Create an empty file with *filename*. If it already exists, truncate it. - - -.. function:: fd_count() - - Count the number of open file descriptors. - - .. function:: match_test(test) Match *test* to patterns set in :func:`set_match_tests`. @@ -713,47 +644,6 @@ The :mod:`test.support` module defines the following functions: self.assertEqual(captured, "hello") -.. function:: temp_dir(path=None, quiet=False) - - A context manager that creates a temporary directory at *path* and - yields the directory. - - If *path* is ``None``, the temporary directory is created using - :func:`tempfile.mkdtemp`. If *quiet* is ``False``, the context manager - raises an exception on error. Otherwise, if *path* is specified and - cannot be created, only a warning is issued. - - -.. function:: change_cwd(path, quiet=False) - - A context manager that temporarily changes the current working - directory to *path* and yields the directory. - - If *quiet* is ``False``, the context manager raises an exception - on error. Otherwise, it issues only a warning and keeps the current - working directory the same. - - -.. function:: temp_cwd(name='tempcwd', quiet=False) - - A context manager that temporarily creates a new directory and - changes the current working directory (CWD). - - The context manager creates a temporary directory in the current - directory with name *name* before temporarily changing the current - working directory. If *name* is ``None``, the temporary directory is - created using :func:`tempfile.mkdtemp`. 
- - If *quiet* is ``False`` and it is not possible to create or change - the CWD, an error is raised. Otherwise, only a warning is raised - and the original CWD is used. - - -.. function:: temp_umask(umask) - - A context manager that temporarily sets the process umask. - - .. function:: disable_faulthandler() A context manager that replaces ``sys.stderr`` with ``sys.__stderr__``. @@ -851,28 +741,6 @@ The :mod:`test.support` module defines the following functions: header size equals *size*. -.. function:: can_symlink() - - Return ``True`` if the OS supports symbolic links, ``False`` - otherwise. - - -.. function:: can_xattr() - - Return ``True`` if the OS supports xattr, ``False`` - otherwise. - - -.. decorator:: skip_unless_symlink - - A decorator for running tests that require support for symbolic links. - - -.. decorator:: skip_unless_xattr - - A decorator for running tests that require support for xattr. - - .. decorator:: anticipate_failure(condition) A decorator to conditionally mark tests with @@ -992,12 +860,6 @@ The :mod:`test.support` module defines the following functions: wrap. -.. function:: make_bad_fd() - - Create an invalid file descriptor by opening and closing a temporary file, - and returning its descriptor. - - .. function:: check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=None) Test for syntax errors in *statement* by attempting to compile *statement*. @@ -1144,11 +1006,6 @@ The :mod:`test.support` module defines the following functions: return load_package_tests(os.path.dirname(__file__), *args) -.. function:: fs_is_case_insensitive(directory) - - Return ``True`` if the file system for *directory* is case-insensitive. - - .. function:: detect_api_mismatch(ref_api, other_api, *, ignore=()) Returns the set of attributes, functions or methods of *ref_api* not @@ -1241,28 +1098,6 @@ The :mod:`test.support` module defines the following classes: attributes on the exception is :exc:`ResourceDenied` raised. -.. class:: EnvironmentVarGuard() - - Class used to temporarily set or unset environment variables. Instances can - be used as a context manager and have a complete dictionary interface for - querying/modifying the underlying ``os.environ``. After exit from the - context manager all changes to environment variables done through this - instance will be rolled back. - - .. versionchanged:: 3.1 - Added dictionary interface. - -.. method:: EnvironmentVarGuard.set(envvar, value) - - Temporarily set the environment variable ``envvar`` to the value of - ``value``. - - -.. method:: EnvironmentVarGuard.unset(envvar) - - Temporarily unset the environment variable ``envvar``. - - .. class:: SuppressCrashReport() A context manager used to try to prevent crash dialog popups on tests that @@ -1332,13 +1167,6 @@ The :mod:`test.support` module defines the following classes: Run *test* and return the result. -.. class:: FakePath(path) - - Simple :term:`path-like object`. It implements the :meth:`__fspath__` - method which just returns the *path* argument. If *path* is an exception, - it will be raised in :meth:`!__fspath__`. - - :mod:`test.support.socket_helper` --- Utilities for socket tests ================================================================ @@ -1634,3 +1462,187 @@ The :mod:`test.support.threading_helper` module provides support for threading t # (to avoid reference cycles) .. versionadded:: 3.8 + + +:mod:`test.support.os_helper` --- Utilities for os tests +======================================================================== + +.. 
module:: test.support.os_helper + :synopsis: Support for os tests. + +The :mod:`test.support.os_helper` module provides support for os tests. + +.. versionadded:: 3.10 + + +.. data:: FS_NONASCII + + A non-ASCII character encodable by :func:`os.fsencode`. + + +.. data:: SAVEDCWD + + Set to :func:`os.getcwd`. + + +.. data:: TESTFN + + Set to a name that is safe to use as the name of a temporary file. Any + temporary file that is created should be closed and unlinked (removed). + + +.. data:: TESTFN_NONASCII + + Set to a filename containing the :data:`FS_NONASCII` character. + + +.. data:: TESTFN_UNENCODABLE + + Set to a filename (str type) that should not be able to be encoded by file + system encoding in strict mode. It may be ``None`` if it's not possible to + generate such a filename. + + +.. data:: TESTFN_UNDECODABLE + + Set to a filename (bytes type) that should not be able to be decoded by + file system encoding in strict mode. It may be ``None`` if it's not + possible to generate such a filename. + + +.. data:: TESTFN_UNICODE + + Set to a non-ASCII name for a temporary file. + + +.. class:: EnvironmentVarGuard() + + Class used to temporarily set or unset environment variables. Instances can + be used as a context manager and have a complete dictionary interface for + querying/modifying the underlying ``os.environ``. After exit from the + context manager all changes to environment variables done through this + instance will be rolled back. + + .. versionchanged:: 3.1 + Added dictionary interface. + + +.. class:: FakePath(path) + + Simple :term:`path-like object`. It implements the :meth:`__fspath__` + method which just returns the *path* argument. If *path* is an exception, + it will be raised in :meth:`!__fspath__`. + + +.. method:: EnvironmentVarGuard.set(envvar, value) + + Temporarily set the environment variable ``envvar`` to the value of + ``value``. + + +.. method:: EnvironmentVarGuard.unset(envvar) + + Temporarily unset the environment variable ``envvar``. + + +.. function:: can_symlink() + + Return ``True`` if the OS supports symbolic links, ``False`` + otherwise. + + +.. function:: can_xattr() + + Return ``True`` if the OS supports xattr, ``False`` + otherwise. + + +.. function:: change_cwd(path, quiet=False) + + A context manager that temporarily changes the current working + directory to *path* and yields the directory. + + If *quiet* is ``False``, the context manager raises an exception + on error. Otherwise, it issues only a warning and keeps the current + working directory the same. + + +.. function:: create_empty_file(filename) + + Create an empty file with *filename*. If it already exists, truncate it. + + +.. function:: fd_count() + + Count the number of open file descriptors. + + +.. function:: fs_is_case_insensitive(directory) + + Return ``True`` if the file system for *directory* is case-insensitive. + + +.. function:: make_bad_fd() + + Create an invalid file descriptor by opening and closing a temporary file, + and returning its descriptor. + + +.. function:: rmdir(filename) + + Call :func:`os.rmdir` on *filename*. On Windows platforms, this is + wrapped with a wait loop that checks for the existence of the file. + + +.. function:: rmtree(path) + + Call :func:`shutil.rmtree` on *path* or call :func:`os.lstat` and + :func:`os.rmdir` to remove a path and its contents. On Windows platforms, + this is wrapped with a wait loop that checks for the existence of the files. + + +.. 
decorator:: skip_unless_symlink + + A decorator for running tests that require support for symbolic links. + + +.. decorator:: skip_unless_xattr + + A decorator for running tests that require support for xattr. + + +.. function:: temp_cwd(name='tempcwd', quiet=False) + + A context manager that temporarily creates a new directory and + changes the current working directory (CWD). + + The context manager creates a temporary directory in the current + directory with name *name* before temporarily changing the current + working directory. If *name* is ``None``, the temporary directory is + created using :func:`tempfile.mkdtemp`. + + If *quiet* is ``False`` and it is not possible to create or change + the CWD, an error is raised. Otherwise, only a warning is raised + and the original CWD is used. + + +.. function:: temp_dir(path=None, quiet=False) + + A context manager that creates a temporary directory at *path* and + yields the directory. + + If *path* is ``None``, the temporary directory is created using + :func:`tempfile.mkdtemp`. If *quiet* is ``False``, the context manager + raises an exception on error. Otherwise, if *path* is specified and + cannot be created, only a warning is issued. + + +.. function:: temp_umask(umask) + + A context manager that temporarily sets the process umask. + + +.. function:: unlink(filename) + + Call :func:`os.unlink` on *filename*. On Windows platforms, this is + wrapped with a wait loop that checks for the existence fo the file. diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index bb905bd895de8..3a5f7b556d767 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -3,7 +3,6 @@ if __name__ != 'test.support': raise ImportError('support must be imported from the test package') -import collections.abc import contextlib import errno import fnmatch @@ -22,8 +21,19 @@ import unittest import warnings +from .os_helper import ( + FS_NONASCII, SAVEDCWD, TESTFN, TESTFN_NONASCII, + TESTFN_UNENCODABLE, TESTFN_UNDECODABLE, + TESTFN_UNICODE, can_symlink, can_xattr, + change_cwd, create_empty_file, fd_count, + fs_is_case_insensitive, make_bad_fd, rmdir, + rmtree, skip_unless_symlink, skip_unless_xattr, + temp_cwd, temp_dir, temp_umask, unlink, + EnvironmentVarGuard, FakePath, _longpath) + from .testresult import get_test_runner + __all__ = [ # globals "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast", @@ -36,18 +46,15 @@ # io "record_original_stdout", "get_original_stdout", "captured_stdout", "captured_stdin", "captured_stderr", - # filesystem - "TESTFN", "SAVEDCWD", "unlink", "rmtree", "temp_cwd", "findfile", - "create_empty_file", "can_symlink", "fs_is_case_insensitive", # unittest "is_resource_enabled", "requires", "requires_freebsd_version", "requires_linux_version", "requires_mac_ver", "check_syntax_error", "check_syntax_warning", "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", "BasicTestRunner", "run_unittest", "run_doctest", - "skip_unless_symlink", "requires_gzip", "requires_bz2", "requires_lzma", + "requires_gzip", "requires_bz2", "requires_lzma", "bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute", - "requires_IEEE_754", "skip_unless_xattr", "requires_zlib", + "requires_IEEE_754", "requires_zlib", "anticipate_failure", "load_package_tests", "detect_api_mismatch", "check__all__", "skip_if_buggy_ucrt_strfptime", "ignore_warnings", @@ -57,13 +64,12 @@ # network "open_urlresource", # processes - 'temp_umask', "reap_children", + "reap_children", # 
miscellaneous "check_warnings", "check_no_resource_warning", "check_no_warnings", - "EnvironmentVarGuard", - "run_with_locale", "swap_item", + "run_with_locale", "swap_item", "findfile", "swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict", - "run_with_tz", "PGO", "missing_compiler_executable", "fd_count", + "run_with_tz", "PGO", "missing_compiler_executable", "ALWAYS_EQ", "NEVER_EQ", "LARGEST", "SMALLEST", "LOOPBACK_TIMEOUT", "INTERNET_TIMEOUT", "SHORT_TIMEOUT", "LONG_TIMEOUT", ] @@ -318,6 +324,7 @@ def unload(name): except KeyError: pass + def _force_run(path, func, *args): try: return func(*args) @@ -328,124 +335,6 @@ def _force_run(path, func, *args): os.chmod(path, stat.S_IRWXU) return func(*args) -if sys.platform.startswith("win"): - def _waitfor(func, pathname, waitall=False): - # Perform the operation - func(pathname) - # Now setup the wait loop - if waitall: - dirname = pathname - else: - dirname, name = os.path.split(pathname) - dirname = dirname or '.' - # Check for `pathname` to be removed from the filesystem. - # The exponential backoff of the timeout amounts to a total - # of ~1 second after which the deletion is probably an error - # anyway. - # Testing on an i7 at 4.3GHz shows that usually only 1 iteration is - # required when contention occurs. - timeout = 0.001 - while timeout < 1.0: - # Note we are only testing for the existence of the file(s) in - # the contents of the directory regardless of any security or - # access rights. If we have made it this far, we have sufficient - # permissions to do that much using Python's equivalent of the - # Windows API FindFirstFile. - # Other Windows APIs can fail or give incorrect results when - # dealing with files that are pending deletion. - L = os.listdir(dirname) - if not (L if waitall else name in L): - return - # Increase the timeout and try again - time.sleep(timeout) - timeout *= 2 - warnings.warn('tests may fail, delete still pending for ' + pathname, - RuntimeWarning, stacklevel=4) - - def _unlink(filename): - _waitfor(os.unlink, filename) - - def _rmdir(dirname): - _waitfor(os.rmdir, dirname) - - def _rmtree(path): - def _rmtree_inner(path): - for name in _force_run(path, os.listdir, path): - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except OSError as exc: - print("support.rmtree(): os.lstat(%r) failed with %s" % (fullname, exc), - file=sys.__stderr__) - mode = 0 - if stat.S_ISDIR(mode): - _waitfor(_rmtree_inner, fullname, waitall=True) - _force_run(fullname, os.rmdir, fullname) - else: - _force_run(fullname, os.unlink, fullname) - _waitfor(_rmtree_inner, path, waitall=True) - _waitfor(lambda p: _force_run(p, os.rmdir, p), path) - - def _longpath(path): - try: - import ctypes - except ImportError: - # No ctypes means we can't expands paths. 
- pass - else: - buffer = ctypes.create_unicode_buffer(len(path) * 2) - length = ctypes.windll.kernel32.GetLongPathNameW(path, buffer, - len(buffer)) - if length: - return buffer[:length] - return path -else: - _unlink = os.unlink - _rmdir = os.rmdir - - def _rmtree(path): - import shutil - try: - shutil.rmtree(path) - return - except OSError: - pass - - def _rmtree_inner(path): - for name in _force_run(path, os.listdir, path): - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except OSError: - mode = 0 - if stat.S_ISDIR(mode): - _rmtree_inner(fullname) - _force_run(path, os.rmdir, fullname) - else: - _force_run(path, os.unlink, fullname) - _rmtree_inner(path) - os.rmdir(path) - - def _longpath(path): - return path - -def unlink(filename): - try: - _unlink(filename) - except (FileNotFoundError, NotADirectoryError): - pass - -def rmdir(dirname): - try: - _rmdir(dirname) - except FileNotFoundError: - pass - -def rmtree(path): - try: - _rmtree(path) - except FileNotFoundError: - pass def make_legacy_pyc(source): """Move a PEP 3147/488 pyc file to its legacy pyc location. @@ -714,149 +603,10 @@ def requires_lzma(reason='requires lzma'): else: unix_shell = None -# Filename used for testing -if os.name == 'java': - # Jython disallows @ in module names - TESTFN = '$test' -else: - TESTFN = '@test' - -# Disambiguate TESTFN for parallel testing, while letting it remain a valid -# module name. -TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) - # Define the URL of a dedicated HTTP server for the network tests. # The URL must use clear-text HTTP: no redirection to encrypted HTTPS. TEST_HTTP_URL = "http://www.pythontest.net" -# FS_NONASCII: non-ASCII character encodable by os.fsencode(), -# or None if there is no such character. -FS_NONASCII = None -for character in ( - # First try printable and common characters to have a readable filename. - # For each character, the encoding list are just example of encodings able - # to encode the character (the list is not exhaustive). - - # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 - '\u00E6', - # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 - '\u0130', - # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 - '\u0141', - # U+03C6 (Greek Small Letter Phi): cp1253 - '\u03C6', - # U+041A (Cyrillic Capital Letter Ka): cp1251 - '\u041A', - # U+05D0 (Hebrew Letter Alef): Encodable to cp424 - '\u05D0', - # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic - '\u060C', - # U+062A (Arabic Letter Teh): cp720 - '\u062A', - # U+0E01 (Thai Character Ko Kai): cp874 - '\u0E01', - - # Then try more "special" characters. "special" because they may be - # interpreted or displayed differently depending on the exact locale - # encoding and the font. - - # U+00A0 (No-Break Space) - '\u00A0', - # U+20AC (Euro Sign) - '\u20AC', -): - try: - # If Python is set up to use the legacy 'mbcs' in Windows, - # 'replace' error mode is used, and encode() returns b'?' - # for characters missing in the ANSI codepage - if os.fsdecode(os.fsencode(character)) != character: - raise UnicodeError - except UnicodeError: - pass - else: - FS_NONASCII = character - break - -# TESTFN_UNICODE is a non-ascii filename -TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" -if sys.platform == 'darwin': - # In Mac OS X's VFS API file names are, by definition, canonically - # decomposed Unicode, encoded using UTF-8. 
See QA1173: - # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html - import unicodedata - TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) - -# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be -# encoded by the filesystem encoding (in strict mode). It can be None if we -# cannot generate such filename. -TESTFN_UNENCODABLE = None -if os.name == 'nt': - # skip win32s (0) or Windows 9x/ME (1) - if sys.getwindowsversion().platform >= 2: - # Different kinds of characters from various languages to minimize the - # probability that the whole name is encodable to MBCS (issue #9819) - TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" - try: - TESTFN_UNENCODABLE.encode(sys.getfilesystemencoding()) - except UnicodeEncodeError: - pass - else: - print('WARNING: The filename %r CAN be encoded by the filesystem encoding (%s). ' - 'Unicode filename tests may not be effective' - % (TESTFN_UNENCODABLE, sys.getfilesystemencoding())) - TESTFN_UNENCODABLE = None -# Mac OS X denies unencodable filenames (invalid utf-8) -elif sys.platform != 'darwin': - try: - # ascii and utf-8 cannot encode the byte 0xff - b'\xff'.decode(sys.getfilesystemencoding()) - except UnicodeDecodeError: - # 0xff will be encoded using the surrogate character u+DCFF - TESTFN_UNENCODABLE = TESTFN \ - + b'-\xff'.decode(sys.getfilesystemencoding(), 'surrogateescape') - else: - # File system encoding (eg. ISO-8859-* encodings) can encode - # the byte 0xff. Skip some unicode filename tests. - pass - -# TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be -# decoded from the filesystem encoding (in strict mode). It can be None if we -# cannot generate such filename (ex: the latin1 encoding can decode any byte -# sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks -# to the surrogateescape error handler (PEP 383), but not from the filesystem -# encoding in strict mode. -TESTFN_UNDECODABLE = None -for name in ( - # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows - # accepts it to create a file or a directory, or don't accept to enter to - # such directory (when the bytes name is used). So test b'\xe7' first: it is - # not decodable from cp932. - b'\xe7w\xf0', - # undecodable from ASCII, UTF-8 - b'\xff', - # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 - # and cp857 - b'\xae\xd5' - # undecodable from UTF-8 (UNIX and Mac OS X) - b'\xed\xb2\x80', b'\xed\xb4\x80', - # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, - # cp1253, cp1254, cp1255, cp1257, cp1258 - b'\x81\x98', -): - try: - name.decode(sys.getfilesystemencoding()) - except UnicodeDecodeError: - TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name - break - -if FS_NONASCII: - TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII -else: - TESTFN_NONASCII = None - -# Save the initial cwd -SAVEDCWD = os.getcwd() - # Set by libregrtest/main.py so we can skip tests that are not # useful for PGO PGO = False @@ -865,103 +615,6 @@ def requires_lzma(reason='requires lzma'): # PGO task. If this is True, PGO is also True. PGO_EXTENDED = False - at contextlib.contextmanager -def temp_dir(path=None, quiet=False): - """Return a context manager that creates a temporary directory. - - Arguments: - - path: the directory to create temporarily. If omitted or None, - defaults to creating a temporary directory using tempfile.mkdtemp. - - quiet: if False (the default), the context manager raises an exception - on error. 
Otherwise, if the path is specified and cannot be - created, only a warning is issued. - - """ - import tempfile - dir_created = False - if path is None: - path = tempfile.mkdtemp() - dir_created = True - path = os.path.realpath(path) - else: - try: - os.mkdir(path) - dir_created = True - except OSError as exc: - if not quiet: - raise - warnings.warn(f'tests may fail, unable to create ' - f'temporary directory {path!r}: {exc}', - RuntimeWarning, stacklevel=3) - if dir_created: - pid = os.getpid() - try: - yield path - finally: - # In case the process forks, let only the parent remove the - # directory. The child has a different process id. (bpo-30028) - if dir_created and pid == os.getpid(): - rmtree(path) - - at contextlib.contextmanager -def change_cwd(path, quiet=False): - """Return a context manager that changes the current working directory. - - Arguments: - - path: the directory to use as the temporary current working directory. - - quiet: if False (the default), the context manager raises an exception - on error. Otherwise, it issues only a warning and keeps the current - working directory the same. - - """ - saved_dir = os.getcwd() - try: - os.chdir(os.path.realpath(path)) - except OSError as exc: - if not quiet: - raise - warnings.warn(f'tests may fail, unable to change the current working ' - f'directory to {path!r}: {exc}', - RuntimeWarning, stacklevel=3) - try: - yield os.getcwd() - finally: - os.chdir(saved_dir) - - - at contextlib.contextmanager -def temp_cwd(name='tempcwd', quiet=False): - """ - Context manager that temporarily creates and changes the CWD. - - The function temporarily changes the current working directory - after creating a temporary directory in the current directory with - name *name*. If *name* is None, the temporary directory is - created using tempfile.mkdtemp. - - If *quiet* is False (default) and it is not possible to - create or change the CWD, an error is raised. If *quiet* is True, - only a warning is raised and the original CWD is used. - - """ - with temp_dir(path=name, quiet=quiet) as temp_path: - with change_cwd(temp_path, quiet=quiet) as cwd_dir: - yield cwd_dir - -if hasattr(os, "umask"): - @contextlib.contextmanager - def temp_umask(umask): - """Context manager that temporarily sets the process umask.""" - oldmask = os.umask(umask) - try: - yield - finally: - os.umask(oldmask) - # TEST_HOME_DIR refers to the top level directory of the "test" package # that contains Python's regression test suite TEST_SUPPORT_DIR = os.path.dirname(os.path.abspath(__file__)) @@ -970,6 +623,7 @@ def temp_umask(umask): # TEST_DATA_DIR is used as a target download location for remote resources TEST_DATA_DIR = os.path.join(TEST_HOME_DIR, "data") + def findfile(filename, subdir=None): """Try to find a file on sys.path or in the test directory. If it is not found the argument passed to the function is returned (this does not @@ -988,10 +642,6 @@ def findfile(filename, subdir=None): if os.path.exists(fn): return fn return filename -def create_empty_file(filename): - """Create an empty file. If the file already exists, truncate it.""" - fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) - os.close(fd) def sortdict(dict): "Like repr(dict), but in sorted order." @@ -1000,19 +650,6 @@ def sortdict(dict): withcommas = ", ".join(reprpairs) return "{%s}" % withcommas -def make_bad_fd(): - """ - Create an invalid file descriptor by opening and closing a file and return - its fd. 
- """ - file = open(TESTFN, "wb") - try: - return file.fileno() - finally: - file.close() - unlink(TESTFN) - - def check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=None): with testcase.assertRaisesRegex(SyntaxError, errtext) as cm: compile(statement, '', 'exec') @@ -1265,59 +902,6 @@ def __exit__(self, *ignore_exc): sys.modules.update(self.original_modules) -class EnvironmentVarGuard(collections.abc.MutableMapping): - - """Class to help protect the environment variable properly. Can be used as - a context manager.""" - - def __init__(self): - self._environ = os.environ - self._changed = {} - - def __getitem__(self, envvar): - return self._environ[envvar] - - def __setitem__(self, envvar, value): - # Remember the initial value on the first access - if envvar not in self._changed: - self._changed[envvar] = self._environ.get(envvar) - self._environ[envvar] = value - - def __delitem__(self, envvar): - # Remember the initial value on the first access - if envvar not in self._changed: - self._changed[envvar] = self._environ.get(envvar) - if envvar in self._environ: - del self._environ[envvar] - - def keys(self): - return self._environ.keys() - - def __iter__(self): - return iter(self._environ) - - def __len__(self): - return len(self._environ) - - def set(self, envvar, value): - self[envvar] = value - - def unset(self, envvar): - del self[envvar] - - def __enter__(self): - return self - - def __exit__(self, *ignore_exc): - for (k, v) in self._changed.items(): - if v is None: - if k in self._environ: - del self._environ[k] - else: - self._environ[k] = v - os.environ = self._environ - - class DirsOnSysPath(object): """Context manager to temporarily add directories to sys.path. @@ -2133,28 +1717,6 @@ def match_value(self, k, dv, v): return result -_can_symlink = None -def can_symlink(): - global _can_symlink - if _can_symlink is not None: - return _can_symlink - symlink_path = TESTFN + "can_symlink" - try: - os.symlink(TESTFN, symlink_path) - can = True - except (OSError, NotImplementedError, AttributeError): - can = False - else: - os.remove(symlink_path) - _can_symlink = can - return can - -def skip_unless_symlink(test): - """Skip decorator for tests that require functional symlink""" - ok = can_symlink() - msg = "Requires functional symlink implementation" - return test if ok else unittest.skip(msg)(test) - _buggy_ucrt = None def skip_if_buggy_ucrt_strfptime(test): """ @@ -2256,45 +1818,6 @@ def call_link(self, *args, returncode=0): return self._call(self.link, args, self._env, returncode) -_can_xattr = None -def can_xattr(): - import tempfile - global _can_xattr - if _can_xattr is not None: - return _can_xattr - if not hasattr(os, "setxattr"): - can = False - else: - import platform - tmp_dir = tempfile.mkdtemp() - tmp_fp, tmp_name = tempfile.mkstemp(dir=tmp_dir) - try: - with open(TESTFN, "wb") as fp: - try: - # TESTFN & tempfile may use different file systems with - # different capabilities - os.setxattr(tmp_fp, b"user.test", b"") - os.setxattr(tmp_name, b"trusted.foo", b"42") - os.setxattr(fp.fileno(), b"user.test", b"") - # Kernels < 2.6.39 don't respect setxattr flags. 
- kernel_version = platform.release() - m = re.match(r"2.6.(\d{1,2})", kernel_version) - can = m is None or int(m.group(1)) >= 39 - except OSError: - can = False - finally: - unlink(TESTFN) - unlink(tmp_name) - rmdir(tmp_dir) - _can_xattr = can - return can - -def skip_unless_xattr(test): - """Skip decorator for tests that require functional extended attributes""" - ok = can_xattr() - msg = "no non-broken extended attribute support" - return test if ok else unittest.skip(msg)(test) - def skip_if_pgo_task(test): """Skip decorator for tests not run in (non-extended) PGO task""" ok = not PGO or PGO_EXTENDED @@ -2302,20 +1825,6 @@ def skip_if_pgo_task(test): return test if ok else unittest.skip(msg)(test) -def fs_is_case_insensitive(directory): - """Detects if the file system for the specified directory is case-insensitive.""" - import tempfile - with tempfile.NamedTemporaryFile(dir=directory) as base: - base_path = base.name - case_path = base_path.upper() - if case_path == base_path: - case_path = base_path.lower() - try: - return os.path.samefile(base_path, case_path) - except FileNotFoundError: - return False - - def detect_api_mismatch(ref_api, other_api, *, ignore=()): """Returns the set of items in ref_api not in other_api, except for a defined list of items to be ignored in this check. @@ -2623,65 +2132,6 @@ def disable_faulthandler(): faulthandler.enable(file=fd, all_threads=True) -def fd_count(): - """Count the number of open file descriptors. - """ - if sys.platform.startswith(('linux', 'freebsd')): - try: - names = os.listdir("/proc/self/fd") - # Subtract one because listdir() internally opens a file - # descriptor to list the content of the /proc/self/fd/ directory. - return len(names) - 1 - except FileNotFoundError: - pass - - MAXFD = 256 - if hasattr(os, 'sysconf'): - try: - MAXFD = os.sysconf("SC_OPEN_MAX") - except OSError: - pass - - old_modes = None - if sys.platform == 'win32': - # bpo-25306, bpo-31009: Call CrtSetReportMode() to not kill the process - # on invalid file descriptor if Python is compiled in debug mode - try: - import msvcrt - msvcrt.CrtSetReportMode - except (AttributeError, ImportError): - # no msvcrt or a release build - pass - else: - old_modes = {} - for report_type in (msvcrt.CRT_WARN, - msvcrt.CRT_ERROR, - msvcrt.CRT_ASSERT): - old_modes[report_type] = msvcrt.CrtSetReportMode(report_type, 0) - - try: - count = 0 - for fd in range(MAXFD): - try: - # Prefer dup() over fstat(). fstat() can require input/output - # whereas dup() doesn't. - fd2 = os.dup(fd) - except OSError as e: - if e.errno != errno.EBADF: - raise - else: - os.close(fd2) - count += 1 - finally: - if old_modes is not None: - for report_type in (msvcrt.CRT_WARN, - msvcrt.CRT_ERROR, - msvcrt.CRT_ASSERT): - msvcrt.CrtSetReportMode(report_type, old_modes[report_type]) - - return count - - class SaveSignals: """ Save and restore signal handlers. @@ -2726,24 +2176,6 @@ def with_pymalloc(): return _testcapi.WITH_PYMALLOC -class FakePath: - """Simple implementation of the path protocol. - """ - def __init__(self, path): - self.path = path - - def __repr__(self): - return f'<FakePath {self.path!r}>' - - def __fspath__(self): - if (isinstance(self.path, BaseException) or - isinstance(self.path, type) and - issubclass(self.path, BaseException)): - raise self.path - else: - return self.path - - class _ALWAYS_EQ: """ Object that is equal to anything.
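For illustration only (this snippet is not part of the commit): after the split, the filesystem helpers live in the new test.support.os_helper module, and they remain importable from test.support because __init__.py re-imports them, even though they were dropped from __all__. A minimal usage sketch, assuming a checkout with this change applied:

    # Both import paths resolve to the same objects after the split.
    from test.support import temp_cwd               # still works: re-imported in __init__.py
    from test.support.os_helper import temp_dir, unlink, TESTFN

    def example():
        with temp_dir() as path:      # create a temporary directory, removed on exit
            print(path)
        with temp_cwd():              # create a directory and chdir into it for the block
            with open(TESTFN, "w") as f:
                f.write("scratch")
            unlink(TESTFN)            # missing files are silently ignored
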
diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py new file mode 100644 index 0000000000000..d3347027cf204 --- /dev/null +++ b/Lib/test/support/os_helper.py @@ -0,0 +1,611 @@ +import collections.abc +import contextlib +import errno +import os +import re +import stat +import sys +import time +import unittest +import warnings + + +# Filename used for testing +if os.name == 'java': + # Jython disallows @ in module names + TESTFN = '$test' +else: + TESTFN = '@test' + +# Disambiguate TESTFN for parallel testing, while letting it remain a valid +# module name. +TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) + +# TESTFN_UNICODE is a non-ascii filename +TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +if sys.platform == 'darwin': + # In Mac OS X's VFS API file names are, by definition, canonically + # decomposed Unicode, encoded using UTF-8. See QA1173: + # http://developer.apple.com/mac/library/qa/qa2001/qa1173.html + import unicodedata + TESTFN_UNICODE = unicodedata.normalize('NFD', TESTFN_UNICODE) + +# TESTFN_UNENCODABLE is a filename (str type) that should *not* be able to be +# encoded by the filesystem encoding (in strict mode). It can be None if we +# cannot generate such filename. +TESTFN_UNENCODABLE = None +if os.name == 'nt': + # skip win32s (0) or Windows 9x/ME (1) + if sys.getwindowsversion().platform >= 2: + # Different kinds of characters from various languages to minimize the + # probability that the whole name is encodable to MBCS (issue #9819) + TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" + try: + TESTFN_UNENCODABLE.encode(sys.getfilesystemencoding()) + except UnicodeEncodeError: + pass + else: + print('WARNING: The filename %r CAN be encoded by the filesystem ' + 'encoding (%s). Unicode filename tests may not be effective' + % (TESTFN_UNENCODABLE, sys.getfilesystemencoding())) + TESTFN_UNENCODABLE = None +# Mac OS X denies unencodable filenames (invalid utf-8) +elif sys.platform != 'darwin': + try: + # ascii and utf-8 cannot encode the byte 0xff + b'\xff'.decode(sys.getfilesystemencoding()) + except UnicodeDecodeError: + # 0xff will be encoded using the surrogate character u+DCFF + TESTFN_UNENCODABLE = TESTFN \ + + b'-\xff'.decode(sys.getfilesystemencoding(), 'surrogateescape') + else: + # File system encoding (eg. ISO-8859-* encodings) can encode + # the byte 0xff. Skip some unicode filename tests. + pass + +# FS_NONASCII: non-ASCII character encodable by os.fsencode(), +# or None if there is no such character. +FS_NONASCII = None +for character in ( + # First try printable and common characters to have a readable filename. + # For each character, the encoding list are just example of encodings able + # to encode the character (the list is not exhaustive). + + # U+00E6 (Latin Small Letter Ae): cp1252, iso-8859-1 + '\u00E6', + # U+0130 (Latin Capital Letter I With Dot Above): cp1254, iso8859_3 + '\u0130', + # U+0141 (Latin Capital Letter L With Stroke): cp1250, cp1257 + '\u0141', + # U+03C6 (Greek Small Letter Phi): cp1253 + '\u03C6', + # U+041A (Cyrillic Capital Letter Ka): cp1251 + '\u041A', + # U+05D0 (Hebrew Letter Alef): Encodable to cp424 + '\u05D0', + # U+060C (Arabic Comma): cp864, cp1006, iso8859_6, mac_arabic + '\u060C', + # U+062A (Arabic Letter Teh): cp720 + '\u062A', + # U+0E01 (Thai Character Ko Kai): cp874 + '\u0E01', + + # Then try more "special" characters. "special" because they may be + # interpreted or displayed differently depending on the exact locale + # encoding and the font. 
+ + # U+00A0 (No-Break Space) + '\u00A0', + # U+20AC (Euro Sign) + '\u20AC', +): + try: + # If Python is set up to use the legacy 'mbcs' in Windows, + # 'replace' error mode is used, and encode() returns b'?' + # for characters missing in the ANSI codepage + if os.fsdecode(os.fsencode(character)) != character: + raise UnicodeError + except UnicodeError: + pass + else: + FS_NONASCII = character + break + +# Save the initial cwd +SAVEDCWD = os.getcwd() + +# TESTFN_UNDECODABLE is a filename (bytes type) that should *not* be able to be +# decoded from the filesystem encoding (in strict mode). It can be None if we +# cannot generate such filename (ex: the latin1 encoding can decode any byte +# sequence). On UNIX, TESTFN_UNDECODABLE can be decoded by os.fsdecode() thanks +# to the surrogateescape error handler (PEP 383), but not from the filesystem +# encoding in strict mode. +TESTFN_UNDECODABLE = None +for name in ( + # b'\xff' is not decodable by os.fsdecode() with code page 932. Windows + # accepts it to create a file or a directory, or don't accept to enter to + # such directory (when the bytes name is used). So test b'\xe7' first: + # it is not decodable from cp932. + b'\xe7w\xf0', + # undecodable from ASCII, UTF-8 + b'\xff', + # undecodable from iso8859-3, iso8859-6, iso8859-7, cp424, iso8859-8, cp856 + # and cp857 + b'\xae\xd5' + # undecodable from UTF-8 (UNIX and Mac OS X) + b'\xed\xb2\x80', b'\xed\xb4\x80', + # undecodable from shift_jis, cp869, cp874, cp932, cp1250, cp1251, cp1252, + # cp1253, cp1254, cp1255, cp1257, cp1258 + b'\x81\x98', +): + try: + name.decode(sys.getfilesystemencoding()) + except UnicodeDecodeError: + TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name + break + +if FS_NONASCII: + TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII +else: + TESTFN_NONASCII = None + + +def make_bad_fd(): + """ + Create an invalid file descriptor by opening and closing a file and return + its fd. + """ + file = open(TESTFN, "wb") + try: + return file.fileno() + finally: + file.close() + unlink(TESTFN) + + +_can_symlink = None + + +def can_symlink(): + global _can_symlink + if _can_symlink is not None: + return _can_symlink + symlink_path = TESTFN + "can_symlink" + try: + os.symlink(TESTFN, symlink_path) + can = True + except (OSError, NotImplementedError, AttributeError): + can = False + else: + os.remove(symlink_path) + _can_symlink = can + return can + + +def skip_unless_symlink(test): + """Skip decorator for tests that require functional symlink""" + ok = can_symlink() + msg = "Requires functional symlink implementation" + return test if ok else unittest.skip(msg)(test) + + +_can_xattr = None + + +def can_xattr(): + import tempfile + global _can_xattr + if _can_xattr is not None: + return _can_xattr + if not hasattr(os, "setxattr"): + can = False + else: + import platform + tmp_dir = tempfile.mkdtemp() + tmp_fp, tmp_name = tempfile.mkstemp(dir=tmp_dir) + try: + with open(TESTFN, "wb") as fp: + try: + # TESTFN & tempfile may use different file systems with + # different capabilities + os.setxattr(tmp_fp, b"user.test", b"") + os.setxattr(tmp_name, b"trusted.foo", b"42") + os.setxattr(fp.fileno(), b"user.test", b"") + # Kernels < 2.6.39 don't respect setxattr flags. 
+ kernel_version = platform.release() + m = re.match(r"2.6.(\d{1,2})", kernel_version) + can = m is None or int(m.group(1)) >= 39 + except OSError: + can = False + finally: + unlink(TESTFN) + unlink(tmp_name) + rmdir(tmp_dir) + _can_xattr = can + return can + + +def skip_unless_xattr(test): + """Skip decorator for tests that require functional extended attributes""" + ok = can_xattr() + msg = "no non-broken extended attribute support" + return test if ok else unittest.skip(msg)(test) + + +def unlink(filename): + try: + _unlink(filename) + except (FileNotFoundError, NotADirectoryError): + pass + + +if sys.platform.startswith("win"): + def _waitfor(func, pathname, waitall=False): + # Perform the operation + func(pathname) + # Now setup the wait loop + if waitall: + dirname = pathname + else: + dirname, name = os.path.split(pathname) + dirname = dirname or '.' + # Check for `pathname` to be removed from the filesystem. + # The exponential backoff of the timeout amounts to a total + # of ~1 second after which the deletion is probably an error + # anyway. + # Testing on an i7 at 4.3GHz shows that usually only 1 iteration is + # required when contention occurs. + timeout = 0.001 + while timeout < 1.0: + # Note we are only testing for the existence of the file(s) in + # the contents of the directory regardless of any security or + # access rights. If we have made it this far, we have sufficient + # permissions to do that much using Python's equivalent of the + # Windows API FindFirstFile. + # Other Windows APIs can fail or give incorrect results when + # dealing with files that are pending deletion. + L = os.listdir(dirname) + if not (L if waitall else name in L): + return + # Increase the timeout and try again + time.sleep(timeout) + timeout *= 2 + warnings.warn('tests may fail, delete still pending for ' + pathname, + RuntimeWarning, stacklevel=4) + + def _unlink(filename): + _waitfor(os.unlink, filename) + + def _rmdir(dirname): + _waitfor(os.rmdir, dirname) + + def _rmtree(path): + from test.support import _force_run + + def _rmtree_inner(path): + for name in _force_run(path, os.listdir, path): + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except OSError as exc: + print("support.rmtree(): os.lstat(%r) failed with %s" + % (fullname, exc), + file=sys.__stderr__) + mode = 0 + if stat.S_ISDIR(mode): + _waitfor(_rmtree_inner, fullname, waitall=True) + _force_run(fullname, os.rmdir, fullname) + else: + _force_run(fullname, os.unlink, fullname) + _waitfor(_rmtree_inner, path, waitall=True) + _waitfor(lambda p: _force_run(p, os.rmdir, p), path) + + def _longpath(path): + try: + import ctypes + except ImportError: + # No ctypes means we can't expands paths. 
+ pass + else: + buffer = ctypes.create_unicode_buffer(len(path) * 2) + length = ctypes.windll.kernel32.GetLongPathNameW(path, buffer, + len(buffer)) + if length: + return buffer[:length] + return path +else: + _unlink = os.unlink + _rmdir = os.rmdir + + def _rmtree(path): + import shutil + try: + shutil.rmtree(path) + return + except OSError: + pass + + def _rmtree_inner(path): + from test.support import _force_run + for name in _force_run(path, os.listdir, path): + fullname = os.path.join(path, name) + try: + mode = os.lstat(fullname).st_mode + except OSError: + mode = 0 + if stat.S_ISDIR(mode): + _rmtree_inner(fullname) + _force_run(path, os.rmdir, fullname) + else: + _force_run(path, os.unlink, fullname) + _rmtree_inner(path) + os.rmdir(path) + + def _longpath(path): + return path + + +def rmdir(dirname): + try: + _rmdir(dirname) + except FileNotFoundError: + pass + + +def rmtree(path): + try: + _rmtree(path) + except FileNotFoundError: + pass + + + at contextlib.contextmanager +def temp_dir(path=None, quiet=False): + """Return a context manager that creates a temporary directory. + + Arguments: + + path: the directory to create temporarily. If omitted or None, + defaults to creating a temporary directory using tempfile.mkdtemp. + + quiet: if False (the default), the context manager raises an exception + on error. Otherwise, if the path is specified and cannot be + created, only a warning is issued. + + """ + import tempfile + dir_created = False + if path is None: + path = tempfile.mkdtemp() + dir_created = True + path = os.path.realpath(path) + else: + try: + os.mkdir(path) + dir_created = True + except OSError as exc: + if not quiet: + raise + warnings.warn(f'tests may fail, unable to create ' + f'temporary directory {path!r}: {exc}', + RuntimeWarning, stacklevel=3) + if dir_created: + pid = os.getpid() + try: + yield path + finally: + # In case the process forks, let only the parent remove the + # directory. The child has a different process id. (bpo-30028) + if dir_created and pid == os.getpid(): + rmtree(path) + + + at contextlib.contextmanager +def change_cwd(path, quiet=False): + """Return a context manager that changes the current working directory. + + Arguments: + + path: the directory to use as the temporary current working directory. + + quiet: if False (the default), the context manager raises an exception + on error. Otherwise, it issues only a warning and keeps the current + working directory the same. + + """ + saved_dir = os.getcwd() + try: + os.chdir(os.path.realpath(path)) + except OSError as exc: + if not quiet: + raise + warnings.warn(f'tests may fail, unable to change the current working ' + f'directory to {path!r}: {exc}', + RuntimeWarning, stacklevel=3) + try: + yield os.getcwd() + finally: + os.chdir(saved_dir) + + + at contextlib.contextmanager +def temp_cwd(name='tempcwd', quiet=False): + """ + Context manager that temporarily creates and changes the CWD. + + The function temporarily changes the current working directory + after creating a temporary directory in the current directory with + name *name*. If *name* is None, the temporary directory is + created using tempfile.mkdtemp. + + If *quiet* is False (default) and it is not possible to + create or change the CWD, an error is raised. If *quiet* is True, + only a warning is raised and the original CWD is used. + + """ + with temp_dir(path=name, quiet=quiet) as temp_path: + with change_cwd(temp_path, quiet=quiet) as cwd_dir: + yield cwd_dir + + +def create_empty_file(filename): + """Create an empty file. 
If the file already exists, truncate it.""" + fd = os.open(filename, os.O_WRONLY | os.O_CREAT | os.O_TRUNC) + os.close(fd) + + +def fs_is_case_insensitive(directory): + """Detects if the file system for the specified directory + is case-insensitive.""" + import tempfile + with tempfile.NamedTemporaryFile(dir=directory) as base: + base_path = base.name + case_path = base_path.upper() + if case_path == base_path: + case_path = base_path.lower() + try: + return os.path.samefile(base_path, case_path) + except FileNotFoundError: + return False + + +class FakePath: + """Simple implementation of the path protocol. + """ + def __init__(self, path): + self.path = path + + def __repr__(self): + return f'<FakePath {self.path!r}>' + + def __fspath__(self): + if (isinstance(self.path, BaseException) or + isinstance(self.path, type) and + issubclass(self.path, BaseException)): + raise self.path + else: + return self.path + + +def fd_count(): + """Count the number of open file descriptors. + """ + if sys.platform.startswith(('linux', 'freebsd')): + try: + names = os.listdir("/proc/self/fd") + # Subtract one because listdir() internally opens a file + # descriptor to list the content of the /proc/self/fd/ directory. + return len(names) - 1 + except FileNotFoundError: + pass + + MAXFD = 256 + if hasattr(os, 'sysconf'): + try: + MAXFD = os.sysconf("SC_OPEN_MAX") + except OSError: + pass + + old_modes = None + if sys.platform == 'win32': + # bpo-25306, bpo-31009: Call CrtSetReportMode() to not kill the process + # on invalid file descriptor if Python is compiled in debug mode + try: + import msvcrt + msvcrt.CrtSetReportMode + except (AttributeError, ImportError): + # no msvcrt or a release build + pass + else: + old_modes = {} + for report_type in (msvcrt.CRT_WARN, + msvcrt.CRT_ERROR, + msvcrt.CRT_ASSERT): + old_modes[report_type] = msvcrt.CrtSetReportMode(report_type, + 0) + + try: + count = 0 + for fd in range(MAXFD): + try: + # Prefer dup() over fstat(). fstat() can require input/output + # whereas dup() doesn't. + fd2 = os.dup(fd) + except OSError as e: + if e.errno != errno.EBADF: + raise + else: + os.close(fd2) + count += 1 + finally: + if old_modes is not None: + for report_type in (msvcrt.CRT_WARN, + msvcrt.CRT_ERROR, + msvcrt.CRT_ASSERT): + msvcrt.CrtSetReportMode(report_type, old_modes[report_type]) + + return count + + +if hasattr(os, "umask"): + @contextlib.contextmanager + def temp_umask(umask): + """Context manager that temporarily sets the process umask.""" + oldmask = os.umask(umask) + try: + yield + finally: + os.umask(oldmask) + + +class EnvironmentVarGuard(collections.abc.MutableMapping): + + """Class to help protect the environment variable properly.
Can be used as + a context manager.""" + + def __init__(self): + self._environ = os.environ + self._changed = {} + + def __getitem__(self, envvar): + return self._environ[envvar] + + def __setitem__(self, envvar, value): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + self._environ[envvar] = value + + def __delitem__(self, envvar): + # Remember the initial value on the first access + if envvar not in self._changed: + self._changed[envvar] = self._environ.get(envvar) + if envvar in self._environ: + del self._environ[envvar] + + def keys(self): + return self._environ.keys() + + def __iter__(self): + return iter(self._environ) + + def __len__(self): + return len(self._environ) + + def set(self, envvar, value): + self[envvar] = value + + def unset(self, envvar): + del self[envvar] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + for (k, v) in self._changed.items(): + if v is None: + if k in self._environ: + del self._environ[k] + else: + self._environ[k] = v + os.environ = self._environ From webhook-mailer at python.org Wed Jun 10 09:07:11 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Wed, 10 Jun 2020 13:07:11 -0000 Subject: [Python-checkins] Raise specialised syntax error for invalid lambda parameters (GH-20776) Message-ID: https://github.com/python/cpython/commit/c6483c989694cfa328dabd45eb191440da54bc68 commit: c6483c989694cfa328dabd45eb191440da54bc68 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-10T14:07:06+01:00 summary: Raise specialised syntax error for invalid lambda parameters (GH-20776) files: M Grammar/python.gram M Lib/test/test_positional_only_arg.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index dd425eff30b7d..2c350ef68a214 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -329,7 +329,11 @@ expression[expr_ty] (memo): | lambdef lambdef[expr_ty]: - | 'lambda' a=[lambda_parameters] ':' b=expression { _Py_Lambda((a) ? a : CHECK(_PyPegen_empty_arguments(p)), b, EXTRA) } + | 'lambda' a=[lambda_params] ':' b=expression { _Py_Lambda((a) ? a : CHECK(_PyPegen_empty_arguments(p)), b, EXTRA) } + +lambda_params[arguments_ty]: + | invalid_lambda_parameters + | lambda_parameters # lambda_parameters etc. 
duplicates parameters but without annotations # or type comments, and if there's no comma after a parameter, we expect @@ -669,6 +673,9 @@ invalid_dict_comprehension: invalid_parameters: | param_no_default* (slash_with_default | param_with_default+) param_no_default { RAISE_SYNTAX_ERROR("non-default argument follows default argument") } +invalid_lambda_parameters: + | lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default { + RAISE_SYNTAX_ERROR("non-default argument follows default argument") } invalid_star_etc: | '*' (')' | ',' (')' | '**')) { RAISE_SYNTAX_ERROR("named arguments must follow bare *") } | '*' ',' TYPE_COMMENT { RAISE_SYNTAX_ERROR("bare * has associated type comment") } diff --git a/Lib/test/test_positional_only_arg.py b/Lib/test/test_positional_only_arg.py index f7bd401804364..0a9503e2025d6 100644 --- a/Lib/test/test_positional_only_arg.py +++ b/Lib/test/test_positional_only_arg.py @@ -4,7 +4,7 @@ import pickle import unittest -from test.support import check_syntax_error, use_old_parser +from test.support import check_syntax_error def global_pos_only_f(a, b, /): @@ -23,12 +23,10 @@ def assertRaisesSyntaxError(self, codestr, regex="invalid syntax"): compile(codestr + "\n", "", "single") def test_invalid_syntax_errors(self): - if use_old_parser(): - check_syntax_error(self, "def f(a, b = 5, /, c): pass", "non-default argument follows default argument") - check_syntax_error(self, "def f(a = 5, b, /, c): pass", "non-default argument follows default argument") - check_syntax_error(self, "def f(a = 5, b=1, /, c, *, d=2): pass", "non-default argument follows default argument") - check_syntax_error(self, "def f(a = 5, b, /): pass", "non-default argument follows default argument") - + check_syntax_error(self, "def f(a, b = 5, /, c): pass", "non-default argument follows default argument") + check_syntax_error(self, "def f(a = 5, b, /, c): pass", "non-default argument follows default argument") + check_syntax_error(self, "def f(a = 5, b=1, /, c, *, d=2): pass", "non-default argument follows default argument") + check_syntax_error(self, "def f(a = 5, b, /): pass", "non-default argument follows default argument") check_syntax_error(self, "def f(*args, /): pass") check_syntax_error(self, "def f(*args, a, /): pass") check_syntax_error(self, "def f(**kwargs, /): pass") @@ -46,12 +44,10 @@ def test_invalid_syntax_errors(self): check_syntax_error(self, "def f(a, *, c, /, d, e): pass") def test_invalid_syntax_errors_async(self): - if use_old_parser(): - check_syntax_error(self, "async def f(a, b = 5, /, c): pass", "non-default argument follows default argument") - check_syntax_error(self, "async def f(a = 5, b, /, c): pass", "non-default argument follows default argument") - check_syntax_error(self, "async def f(a = 5, b=1, /, c, d=2): pass", "non-default argument follows default argument") - check_syntax_error(self, "async def f(a = 5, b, /): pass", "non-default argument follows default argument") - + check_syntax_error(self, "async def f(a, b = 5, /, c): pass", "non-default argument follows default argument") + check_syntax_error(self, "async def f(a = 5, b, /, c): pass", "non-default argument follows default argument") + check_syntax_error(self, "async def f(a = 5, b=1, /, c, d=2): pass", "non-default argument follows default argument") + check_syntax_error(self, "async def f(a = 5, b, /): pass", "non-default argument follows default argument") check_syntax_error(self, "async def f(*args, /): pass") check_syntax_error(self, "async def 
f(*args, a, /): pass") check_syntax_error(self, "async def f(**kwargs, /): pass") @@ -235,11 +231,9 @@ def test_lambdas(self): self.assertEqual(x(1, 2), 3) def test_invalid_syntax_lambda(self): - if use_old_parser(): - check_syntax_error(self, "lambda a, b = 5, /, c: None", "non-default argument follows default argument") - check_syntax_error(self, "lambda a = 5, b, /, c: None", "non-default argument follows default argument") - check_syntax_error(self, "lambda a = 5, b, /: None", "non-default argument follows default argument") - + check_syntax_error(self, "lambda a, b = 5, /, c: None", "non-default argument follows default argument") + check_syntax_error(self, "lambda a = 5, b, /, c: None", "non-default argument follows default argument") + check_syntax_error(self, "lambda a = 5, b, /: None", "non-default argument follows default argument") check_syntax_error(self, "lambda *args, /: None") check_syntax_error(self, "lambda *args, a, /: None") check_syntax_error(self, "lambda **kwargs, /: None") diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index e5738e3e04afe..4f13bf772f261 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -144,244 +144,249 @@ static KeywordToken *reserved_keywords[] = { #define expressions_type 1066 #define expression_type 1067 #define lambdef_type 1068 -#define lambda_parameters_type 1069 -#define lambda_slash_no_default_type 1070 -#define lambda_slash_with_default_type 1071 -#define lambda_star_etc_type 1072 -#define lambda_kwds_type 1073 -#define lambda_param_no_default_type 1074 -#define lambda_param_with_default_type 1075 -#define lambda_param_maybe_default_type 1076 -#define lambda_param_type 1077 -#define disjunction_type 1078 -#define conjunction_type 1079 -#define inversion_type 1080 -#define comparison_type 1081 -#define compare_op_bitwise_or_pair_type 1082 -#define eq_bitwise_or_type 1083 -#define noteq_bitwise_or_type 1084 -#define lte_bitwise_or_type 1085 -#define lt_bitwise_or_type 1086 -#define gte_bitwise_or_type 1087 -#define gt_bitwise_or_type 1088 -#define notin_bitwise_or_type 1089 -#define in_bitwise_or_type 1090 -#define isnot_bitwise_or_type 1091 -#define is_bitwise_or_type 1092 -#define bitwise_or_type 1093 // Left-recursive -#define bitwise_xor_type 1094 // Left-recursive -#define bitwise_and_type 1095 // Left-recursive -#define shift_expr_type 1096 // Left-recursive -#define sum_type 1097 // Left-recursive -#define term_type 1098 // Left-recursive -#define factor_type 1099 -#define power_type 1100 -#define await_primary_type 1101 -#define primary_type 1102 // Left-recursive -#define slices_type 1103 -#define slice_type 1104 -#define atom_type 1105 -#define strings_type 1106 -#define list_type 1107 -#define listcomp_type 1108 -#define tuple_type 1109 -#define group_type 1110 -#define genexp_type 1111 -#define set_type 1112 -#define setcomp_type 1113 -#define dict_type 1114 -#define dictcomp_type 1115 -#define double_starred_kvpairs_type 1116 -#define double_starred_kvpair_type 1117 -#define kvpair_type 1118 -#define for_if_clauses_type 1119 -#define for_if_clause_type 1120 -#define yield_expr_type 1121 -#define arguments_type 1122 -#define args_type 1123 -#define kwargs_type 1124 -#define starred_expression_type 1125 -#define kwarg_or_starred_type 1126 -#define kwarg_or_double_starred_type 1127 -#define star_targets_type 1128 -#define star_targets_seq_type 1129 -#define star_target_type 1130 -#define star_atom_type 1131 -#define single_target_type 1132 -#define single_subscript_attribute_target_type 1133 -#define 
del_targets_type 1134 -#define del_target_type 1135 -#define del_t_atom_type 1136 -#define del_target_end_type 1137 -#define targets_type 1138 -#define target_type 1139 -#define t_primary_type 1140 // Left-recursive -#define t_lookahead_type 1141 -#define t_atom_type 1142 -#define incorrect_arguments_type 1143 -#define invalid_kwarg_type 1144 -#define invalid_named_expression_type 1145 -#define invalid_assignment_type 1146 -#define invalid_block_type 1147 -#define invalid_comprehension_type 1148 -#define invalid_dict_comprehension_type 1149 -#define invalid_parameters_type 1150 -#define invalid_star_etc_type 1151 -#define invalid_lambda_star_etc_type 1152 -#define invalid_double_type_comments_type 1153 -#define invalid_del_target_type 1154 -#define invalid_import_from_targets_type 1155 -#define _loop0_1_type 1156 -#define _loop0_2_type 1157 -#define _loop0_4_type 1158 -#define _gather_3_type 1159 -#define _loop0_6_type 1160 -#define _gather_5_type 1161 -#define _loop0_8_type 1162 -#define _gather_7_type 1163 -#define _loop0_10_type 1164 -#define _gather_9_type 1165 -#define _loop1_11_type 1166 -#define _loop0_13_type 1167 -#define _gather_12_type 1168 -#define _tmp_14_type 1169 -#define _tmp_15_type 1170 -#define _tmp_16_type 1171 -#define _tmp_17_type 1172 -#define _tmp_18_type 1173 -#define _tmp_19_type 1174 -#define _tmp_20_type 1175 -#define _tmp_21_type 1176 -#define _loop1_22_type 1177 -#define _tmp_23_type 1178 -#define _tmp_24_type 1179 -#define _loop0_26_type 1180 -#define _gather_25_type 1181 -#define _loop0_28_type 1182 -#define _gather_27_type 1183 -#define _tmp_29_type 1184 -#define _loop0_30_type 1185 -#define _loop1_31_type 1186 -#define _loop0_33_type 1187 -#define _gather_32_type 1188 -#define _tmp_34_type 1189 -#define _loop0_36_type 1190 -#define _gather_35_type 1191 -#define _tmp_37_type 1192 -#define _loop0_39_type 1193 -#define _gather_38_type 1194 -#define _loop0_41_type 1195 -#define _gather_40_type 1196 -#define _loop0_43_type 1197 -#define _gather_42_type 1198 -#define _loop0_45_type 1199 -#define _gather_44_type 1200 -#define _tmp_46_type 1201 -#define _loop1_47_type 1202 -#define _tmp_48_type 1203 -#define _tmp_49_type 1204 -#define _tmp_50_type 1205 -#define _tmp_51_type 1206 -#define _tmp_52_type 1207 -#define _loop0_53_type 1208 -#define _loop0_54_type 1209 -#define _loop0_55_type 1210 -#define _loop1_56_type 1211 -#define _loop0_57_type 1212 -#define _loop1_58_type 1213 -#define _loop1_59_type 1214 -#define _loop1_60_type 1215 -#define _loop0_61_type 1216 -#define _loop1_62_type 1217 -#define _loop0_63_type 1218 -#define _loop1_64_type 1219 -#define _loop0_65_type 1220 -#define _loop1_66_type 1221 -#define _loop1_67_type 1222 -#define _tmp_68_type 1223 -#define _loop0_70_type 1224 -#define _gather_69_type 1225 -#define _loop1_71_type 1226 -#define _loop0_73_type 1227 -#define _gather_72_type 1228 -#define _loop1_74_type 1229 -#define _loop0_75_type 1230 -#define _loop0_76_type 1231 -#define _loop0_77_type 1232 -#define _loop1_78_type 1233 -#define _loop0_79_type 1234 -#define _loop1_80_type 1235 -#define _loop1_81_type 1236 -#define _loop1_82_type 1237 -#define _loop0_83_type 1238 -#define _loop1_84_type 1239 -#define _loop0_85_type 1240 -#define _loop1_86_type 1241 -#define _loop0_87_type 1242 -#define _loop1_88_type 1243 -#define _loop1_89_type 1244 -#define _loop1_90_type 1245 -#define _loop1_91_type 1246 -#define _tmp_92_type 1247 -#define _loop0_94_type 1248 -#define _gather_93_type 1249 -#define _tmp_95_type 1250 -#define _tmp_96_type 1251 -#define 
_tmp_97_type 1252 -#define _tmp_98_type 1253 -#define _loop1_99_type 1254 -#define _tmp_100_type 1255 -#define _tmp_101_type 1256 -#define _loop0_103_type 1257 -#define _gather_102_type 1258 -#define _loop1_104_type 1259 -#define _loop0_105_type 1260 -#define _loop0_106_type 1261 -#define _tmp_107_type 1262 -#define _tmp_108_type 1263 -#define _loop0_110_type 1264 -#define _gather_109_type 1265 -#define _loop0_112_type 1266 -#define _gather_111_type 1267 -#define _loop0_114_type 1268 -#define _gather_113_type 1269 -#define _loop0_116_type 1270 -#define _gather_115_type 1271 -#define _loop0_117_type 1272 -#define _loop0_119_type 1273 -#define _gather_118_type 1274 -#define _tmp_120_type 1275 -#define _loop0_122_type 1276 -#define _gather_121_type 1277 -#define _loop0_124_type 1278 -#define _gather_123_type 1279 -#define _tmp_125_type 1280 -#define _loop0_126_type 1281 -#define _tmp_127_type 1282 -#define _loop0_128_type 1283 -#define _loop0_129_type 1284 -#define _tmp_130_type 1285 -#define _tmp_131_type 1286 -#define _loop0_132_type 1287 -#define _tmp_133_type 1288 -#define _tmp_134_type 1289 -#define _tmp_135_type 1290 -#define _tmp_136_type 1291 -#define _tmp_137_type 1292 -#define _tmp_138_type 1293 -#define _tmp_139_type 1294 -#define _tmp_140_type 1295 -#define _tmp_141_type 1296 -#define _tmp_142_type 1297 -#define _tmp_143_type 1298 -#define _tmp_144_type 1299 -#define _tmp_145_type 1300 -#define _tmp_146_type 1301 -#define _tmp_147_type 1302 -#define _tmp_148_type 1303 -#define _loop1_149_type 1304 -#define _tmp_150_type 1305 -#define _tmp_151_type 1306 +#define lambda_params_type 1069 +#define lambda_parameters_type 1070 +#define lambda_slash_no_default_type 1071 +#define lambda_slash_with_default_type 1072 +#define lambda_star_etc_type 1073 +#define lambda_kwds_type 1074 +#define lambda_param_no_default_type 1075 +#define lambda_param_with_default_type 1076 +#define lambda_param_maybe_default_type 1077 +#define lambda_param_type 1078 +#define disjunction_type 1079 +#define conjunction_type 1080 +#define inversion_type 1081 +#define comparison_type 1082 +#define compare_op_bitwise_or_pair_type 1083 +#define eq_bitwise_or_type 1084 +#define noteq_bitwise_or_type 1085 +#define lte_bitwise_or_type 1086 +#define lt_bitwise_or_type 1087 +#define gte_bitwise_or_type 1088 +#define gt_bitwise_or_type 1089 +#define notin_bitwise_or_type 1090 +#define in_bitwise_or_type 1091 +#define isnot_bitwise_or_type 1092 +#define is_bitwise_or_type 1093 +#define bitwise_or_type 1094 // Left-recursive +#define bitwise_xor_type 1095 // Left-recursive +#define bitwise_and_type 1096 // Left-recursive +#define shift_expr_type 1097 // Left-recursive +#define sum_type 1098 // Left-recursive +#define term_type 1099 // Left-recursive +#define factor_type 1100 +#define power_type 1101 +#define await_primary_type 1102 +#define primary_type 1103 // Left-recursive +#define slices_type 1104 +#define slice_type 1105 +#define atom_type 1106 +#define strings_type 1107 +#define list_type 1108 +#define listcomp_type 1109 +#define tuple_type 1110 +#define group_type 1111 +#define genexp_type 1112 +#define set_type 1113 +#define setcomp_type 1114 +#define dict_type 1115 +#define dictcomp_type 1116 +#define double_starred_kvpairs_type 1117 +#define double_starred_kvpair_type 1118 +#define kvpair_type 1119 +#define for_if_clauses_type 1120 +#define for_if_clause_type 1121 +#define yield_expr_type 1122 +#define arguments_type 1123 +#define args_type 1124 +#define kwargs_type 1125 +#define starred_expression_type 1126 
+#define kwarg_or_starred_type 1127 +#define kwarg_or_double_starred_type 1128 +#define star_targets_type 1129 +#define star_targets_seq_type 1130 +#define star_target_type 1131 +#define star_atom_type 1132 +#define single_target_type 1133 +#define single_subscript_attribute_target_type 1134 +#define del_targets_type 1135 +#define del_target_type 1136 +#define del_t_atom_type 1137 +#define del_target_end_type 1138 +#define targets_type 1139 +#define target_type 1140 +#define t_primary_type 1141 // Left-recursive +#define t_lookahead_type 1142 +#define t_atom_type 1143 +#define incorrect_arguments_type 1144 +#define invalid_kwarg_type 1145 +#define invalid_named_expression_type 1146 +#define invalid_assignment_type 1147 +#define invalid_block_type 1148 +#define invalid_comprehension_type 1149 +#define invalid_dict_comprehension_type 1150 +#define invalid_parameters_type 1151 +#define invalid_lambda_parameters_type 1152 +#define invalid_star_etc_type 1153 +#define invalid_lambda_star_etc_type 1154 +#define invalid_double_type_comments_type 1155 +#define invalid_del_target_type 1156 +#define invalid_import_from_targets_type 1157 +#define _loop0_1_type 1158 +#define _loop0_2_type 1159 +#define _loop0_4_type 1160 +#define _gather_3_type 1161 +#define _loop0_6_type 1162 +#define _gather_5_type 1163 +#define _loop0_8_type 1164 +#define _gather_7_type 1165 +#define _loop0_10_type 1166 +#define _gather_9_type 1167 +#define _loop1_11_type 1168 +#define _loop0_13_type 1169 +#define _gather_12_type 1170 +#define _tmp_14_type 1171 +#define _tmp_15_type 1172 +#define _tmp_16_type 1173 +#define _tmp_17_type 1174 +#define _tmp_18_type 1175 +#define _tmp_19_type 1176 +#define _tmp_20_type 1177 +#define _tmp_21_type 1178 +#define _loop1_22_type 1179 +#define _tmp_23_type 1180 +#define _tmp_24_type 1181 +#define _loop0_26_type 1182 +#define _gather_25_type 1183 +#define _loop0_28_type 1184 +#define _gather_27_type 1185 +#define _tmp_29_type 1186 +#define _loop0_30_type 1187 +#define _loop1_31_type 1188 +#define _loop0_33_type 1189 +#define _gather_32_type 1190 +#define _tmp_34_type 1191 +#define _loop0_36_type 1192 +#define _gather_35_type 1193 +#define _tmp_37_type 1194 +#define _loop0_39_type 1195 +#define _gather_38_type 1196 +#define _loop0_41_type 1197 +#define _gather_40_type 1198 +#define _loop0_43_type 1199 +#define _gather_42_type 1200 +#define _loop0_45_type 1201 +#define _gather_44_type 1202 +#define _tmp_46_type 1203 +#define _loop1_47_type 1204 +#define _tmp_48_type 1205 +#define _tmp_49_type 1206 +#define _tmp_50_type 1207 +#define _tmp_51_type 1208 +#define _tmp_52_type 1209 +#define _loop0_53_type 1210 +#define _loop0_54_type 1211 +#define _loop0_55_type 1212 +#define _loop1_56_type 1213 +#define _loop0_57_type 1214 +#define _loop1_58_type 1215 +#define _loop1_59_type 1216 +#define _loop1_60_type 1217 +#define _loop0_61_type 1218 +#define _loop1_62_type 1219 +#define _loop0_63_type 1220 +#define _loop1_64_type 1221 +#define _loop0_65_type 1222 +#define _loop1_66_type 1223 +#define _loop1_67_type 1224 +#define _tmp_68_type 1225 +#define _loop0_70_type 1226 +#define _gather_69_type 1227 +#define _loop1_71_type 1228 +#define _loop0_73_type 1229 +#define _gather_72_type 1230 +#define _loop1_74_type 1231 +#define _loop0_75_type 1232 +#define _loop0_76_type 1233 +#define _loop0_77_type 1234 +#define _loop1_78_type 1235 +#define _loop0_79_type 1236 +#define _loop1_80_type 1237 +#define _loop1_81_type 1238 +#define _loop1_82_type 1239 +#define _loop0_83_type 1240 +#define _loop1_84_type 1241 +#define 
_loop0_85_type 1242 +#define _loop1_86_type 1243 +#define _loop0_87_type 1244 +#define _loop1_88_type 1245 +#define _loop1_89_type 1246 +#define _loop1_90_type 1247 +#define _loop1_91_type 1248 +#define _tmp_92_type 1249 +#define _loop0_94_type 1250 +#define _gather_93_type 1251 +#define _tmp_95_type 1252 +#define _tmp_96_type 1253 +#define _tmp_97_type 1254 +#define _tmp_98_type 1255 +#define _loop1_99_type 1256 +#define _tmp_100_type 1257 +#define _tmp_101_type 1258 +#define _loop0_103_type 1259 +#define _gather_102_type 1260 +#define _loop1_104_type 1261 +#define _loop0_105_type 1262 +#define _loop0_106_type 1263 +#define _tmp_107_type 1264 +#define _tmp_108_type 1265 +#define _loop0_110_type 1266 +#define _gather_109_type 1267 +#define _loop0_112_type 1268 +#define _gather_111_type 1269 +#define _loop0_114_type 1270 +#define _gather_113_type 1271 +#define _loop0_116_type 1272 +#define _gather_115_type 1273 +#define _loop0_117_type 1274 +#define _loop0_119_type 1275 +#define _gather_118_type 1276 +#define _tmp_120_type 1277 +#define _loop0_122_type 1278 +#define _gather_121_type 1279 +#define _loop0_124_type 1280 +#define _gather_123_type 1281 +#define _tmp_125_type 1282 +#define _loop0_126_type 1283 +#define _tmp_127_type 1284 +#define _loop0_128_type 1285 +#define _loop0_129_type 1286 +#define _tmp_130_type 1287 +#define _tmp_131_type 1288 +#define _loop0_132_type 1289 +#define _tmp_133_type 1290 +#define _loop0_134_type 1291 +#define _tmp_135_type 1292 +#define _tmp_136_type 1293 +#define _tmp_137_type 1294 +#define _tmp_138_type 1295 +#define _tmp_139_type 1296 +#define _tmp_140_type 1297 +#define _tmp_141_type 1298 +#define _tmp_142_type 1299 +#define _tmp_143_type 1300 +#define _tmp_144_type 1301 +#define _tmp_145_type 1302 +#define _tmp_146_type 1303 +#define _tmp_147_type 1304 +#define _tmp_148_type 1305 +#define _tmp_149_type 1306 +#define _tmp_150_type 1307 +#define _loop1_151_type 1308 +#define _loop1_152_type 1309 +#define _tmp_153_type 1310 +#define _tmp_154_type 1311 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -452,6 +457,7 @@ static expr_ty annotated_rhs_rule(Parser *p); static expr_ty expressions_rule(Parser *p); static expr_ty expression_rule(Parser *p); static expr_ty lambdef_rule(Parser *p); +static arguments_ty lambda_params_rule(Parser *p); static arguments_ty lambda_parameters_rule(Parser *p); static asdl_seq* lambda_slash_no_default_rule(Parser *p); static SlashWithDefault* lambda_slash_with_default_rule(Parser *p); @@ -534,6 +540,7 @@ static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); static void *invalid_dict_comprehension_rule(Parser *p); static void *invalid_parameters_rule(Parser *p); +static void *invalid_lambda_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); @@ -672,7 +679,7 @@ static void *_tmp_130_rule(Parser *p); static void *_tmp_131_rule(Parser *p); static asdl_seq *_loop0_132_rule(Parser *p); static void *_tmp_133_rule(Parser *p); -static void *_tmp_134_rule(Parser *p); +static asdl_seq *_loop0_134_rule(Parser *p); static void *_tmp_135_rule(Parser *p); static void *_tmp_136_rule(Parser *p); static void *_tmp_137_rule(Parser *p); @@ -687,9 +694,12 @@ static void *_tmp_145_rule(Parser *p); static void *_tmp_146_rule(Parser *p); static void *_tmp_147_rule(Parser *p); static void *_tmp_148_rule(Parser *p); -static asdl_seq 
*_loop1_149_rule(Parser *p); +static void *_tmp_149_rule(Parser *p); static void *_tmp_150_rule(Parser *p); -static void *_tmp_151_rule(Parser *p); +static asdl_seq *_loop1_151_rule(Parser *p); +static asdl_seq *_loop1_152_rule(Parser *p); +static void *_tmp_153_rule(Parser *p); +static void *_tmp_154_rule(Parser *p); // file: statements? $ @@ -6972,7 +6982,7 @@ expression_rule(Parser *p) return _res; } -// lambdef: 'lambda' lambda_parameters? ':' expression +// lambdef: 'lambda' lambda_params? ':' expression static expr_ty lambdef_rule(Parser *p) { @@ -6992,12 +7002,12 @@ lambdef_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'lambda' lambda_parameters? ':' expression + { // 'lambda' lambda_params? ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> lambdef[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_parameters? ':' expression")); + D(fprintf(stderr, "%*c> lambdef[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); Token * _keyword; Token * _literal; void *a; @@ -7005,14 +7015,14 @@ lambdef_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' && - (a = lambda_parameters_rule(p), 1) // lambda_parameters? + (a = lambda_params_rule(p), 1) // lambda_params? && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (b = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ lambdef[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_parameters? ':' expression")); + D(fprintf(stderr, "%*c+ lambdef[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -7032,7 +7042,62 @@ lambdef_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s lambdef[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_parameters? ':' expression")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? ':' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_params: invalid_lambda_parameters | lambda_parameters +static arguments_ty +lambda_params_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arguments_ty _res = NULL; + int _mark = p->mark; + { // invalid_lambda_parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); + void *invalid_lambda_parameters_var; + if ( + (invalid_lambda_parameters_var = invalid_lambda_parameters_rule(p)) // invalid_lambda_parameters + ) + { + D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); + _res = invalid_lambda_parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_lambda_parameters")); + } + { // lambda_parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); + arguments_ty lambda_parameters_var; + if ( + (lambda_parameters_var = lambda_parameters_rule(p)) // lambda_parameters + ) + { + D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); + _res = lambda_parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_parameters")); } _res = NULL; done: @@ -14997,6 +15062,54 @@ invalid_parameters_rule(Parser *p) return _res; } +// invalid_lambda_parameters: +// | lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default +static void * +invalid_lambda_parameters_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + asdl_seq * _loop0_134_var; + void *_tmp_135_var; + arg_ty lambda_param_no_default_var; + if ( + (_loop0_134_var = _loop0_134_rule(p)) // lambda_param_no_default* + && + (_tmp_135_var = _tmp_135_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ + && + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + D(fprintf(stderr, "%*c+ invalid_lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + // invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT static void * invalid_star_etc_rule(Parser *p) @@ -15015,11 +15128,11 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); Token * _literal; - void *_tmp_134_var; + void *_tmp_136_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_134_var = _tmp_134_rule(p)) // ')' | ',' (')' | '**') + (_tmp_136_var = _tmp_136_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -15089,11 +15202,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_135_var; + void *_tmp_137_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_135_var = _tmp_135_rule(p)) // ':' | ',' (':' | '**') + (_tmp_137_var = _tmp_137_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -16503,12 +16616,12 @@ _loop1_22_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_136_var; + void *_tmp_138_var; while ( - (_tmp_136_var = _tmp_136_rule(p)) // star_targets '=' + (_tmp_138_var = _tmp_138_rule(p)) // star_targets '=' ) { - _res = _tmp_136_var; + _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -16956,12 +17069,12 @@ _loop0_30_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_137_var; + void *_tmp_139_var; while ( - (_tmp_137_var = _tmp_137_rule(p)) // '.' | '...' + (_tmp_139_var = _tmp_139_rule(p)) // '.' | '...' ) { - _res = _tmp_137_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17022,12 +17135,12 @@ _loop1_31_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_138_var; + void *_tmp_140_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) // '.' | '...' + (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' 
) { - _res = _tmp_138_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19154,12 +19267,12 @@ _loop1_67_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_139_var; + void *_tmp_141_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // '@' named_expression NEWLINE + (_tmp_141_var = _tmp_141_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_139_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19386,12 +19499,12 @@ _loop1_71_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_140_var; + void *_tmp_142_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // ',' star_expression + (_tmp_142_var = _tmp_142_rule(p)) // ',' star_expression ) { - _res = _tmp_140_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19571,12 +19684,12 @@ _loop1_74_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_141_var; + void *_tmp_143_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // ',' expression + (_tmp_143_var = _tmp_143_rule(p)) // ',' expression ) { - _res = _tmp_141_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20601,12 +20714,12 @@ _loop1_89_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_142_var; + void *_tmp_144_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // 'or' conjunction + (_tmp_144_var = _tmp_144_rule(p)) // 'or' conjunction ) { - _res = _tmp_142_var; + _res = _tmp_144_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20672,12 +20785,12 @@ _loop1_90_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_143_var; + void *_tmp_145_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // 'and' inversion + (_tmp_145_var = _tmp_145_rule(p)) // 'and' inversion ) { - _res = _tmp_143_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21593,12 +21706,12 @@ _loop0_105_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_144_var; + void *_tmp_146_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // 'if' disjunction + (_tmp_146_var = _tmp_146_rule(p)) // 'if' disjunction ) { - _res = _tmp_144_var; + _res = _tmp_146_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21659,12 +21772,12 @@ _loop0_106_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' 
disjunction)")); - void *_tmp_145_var; + void *_tmp_147_var; while ( - (_tmp_145_var = _tmp_145_rule(p)) // 'if' disjunction + (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction ) { - _res = _tmp_145_var; + _res = _tmp_147_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22269,12 +22382,12 @@ _loop0_117_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_146_var; + void *_tmp_148_var; while ( - (_tmp_146_var = _tmp_146_rule(p)) // ',' star_target + (_tmp_148_var = _tmp_148_rule(p)) // ',' star_target ) { - _res = _tmp_146_var; + _res = _tmp_148_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22878,12 +22991,12 @@ _loop0_128_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_147_var; + void *_tmp_149_var; while ( - (_tmp_147_var = _tmp_147_rule(p)) // star_targets '=' + (_tmp_149_var = _tmp_149_rule(p)) // star_targets '=' ) { - _res = _tmp_147_var; + _res = _tmp_149_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22944,12 +23057,12 @@ _loop0_129_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_148_var; + void *_tmp_150_var; while ( - (_tmp_148_var = _tmp_148_rule(p)) // star_targets '=' + (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' ) { - _res = _tmp_148_var; + _res = _tmp_150_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -23214,13 +23327,13 @@ _tmp_133_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_149_var; + asdl_seq * _loop1_151_var; if ( - (_loop1_149_var = _loop1_149_rule(p)) // param_with_default+ + (_loop1_151_var = _loop1_151_rule(p)) // param_with_default+ ) { D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_149_var; + _res = _loop1_151_var; goto done; } p->mark = _mark; @@ -23233,9 +23346,130 @@ _tmp_133_rule(Parser *p) return _res; } -// _tmp_134: ')' | ',' (')' | '**') +// _loop0_134: lambda_param_no_default +static asdl_seq * +_loop0_134_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = 
PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_134_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_135: lambda_slash_with_default | lambda_param_with_default+ +static void * +_tmp_135_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // lambda_slash_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + SlashWithDefault* lambda_slash_with_default_var; + if ( + (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + ) + { + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + _res = lambda_slash_with_default_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); + } + { // lambda_param_with_default+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + asdl_seq * _loop1_152_var; + if ( + (_loop1_152_var = _loop1_152_rule(p)) // lambda_param_with_default+ + ) + { + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + _res = _loop1_152_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_136: ')' | ',' (')' | '**') static void * -_tmp_134_rule(Parser *p) +_tmp_136_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23249,18 +23483,18 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -23268,21 +23502,21 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_150_var; + void *_tmp_153_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_150_var = _tmp_150_rule(p)) // ')' | '**' + (_tmp_153_var = _tmp_153_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_150_var); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_153_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -23291,9 +23525,9 @@ _tmp_134_rule(Parser *p) return _res; } -// _tmp_135: ':' | ',' (':' | '**') +// _tmp_137: ':' | ',' (':' | '**') static void * -_tmp_135_rule(Parser *p) +_tmp_137_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23307,18 +23541,18 @@ _tmp_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -23326,21 +23560,21 @@ _tmp_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_151_var; + void *_tmp_154_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_151_var = _tmp_151_rule(p)) // ':' | '**' + (_tmp_154_var = _tmp_154_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_151_var); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -23349,9 +23583,9 @@ _tmp_135_rule(Parser *p) return _res; } -// _tmp_136: star_targets '=' +// _tmp_138: star_targets '=' static void * -_tmp_136_rule(Parser *p) +_tmp_138_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23365,7 +23599,7 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -23374,7 +23608,7 @@ _tmp_136_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23384,7 +23618,7 @@ _tmp_136_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23393,9 +23627,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: '.' | '...' +// _tmp_139: '.' | '...' static void * -_tmp_137_rule(Parser *p) +_tmp_139_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23409,18 +23643,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -23428,18 +23662,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23448,9 +23682,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: '.' | '...' +// _tmp_140: '.' | '...' 
static void * -_tmp_138_rule(Parser *p) +_tmp_140_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23464,18 +23698,18 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -23483,18 +23717,18 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23503,9 +23737,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: '@' named_expression NEWLINE +// _tmp_141: '@' named_expression NEWLINE static void * -_tmp_139_rule(Parser *p) +_tmp_141_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23519,7 +23753,7 @@ _tmp_139_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -23531,7 +23765,7 @@ _tmp_139_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23541,7 +23775,7 @@ _tmp_139_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -23550,9 +23784,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: ',' star_expression +// _tmp_142: ',' star_expression static void * -_tmp_140_rule(Parser *p) +_tmp_142_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23566,7 +23800,7 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -23575,7 +23809,7 @@ _tmp_140_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23585,7 +23819,7 @@ _tmp_140_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -23594,9 +23828,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: ',' expression +// _tmp_143: ',' expression static void * -_tmp_141_rule(Parser *p) +_tmp_143_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23610,7 +23844,7 @@ _tmp_141_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -23619,7 +23853,7 @@ _tmp_141_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23629,7 +23863,7 @@ _tmp_141_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -23638,9 +23872,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: 'or' conjunction +// _tmp_144: 'or' conjunction static void * -_tmp_142_rule(Parser *p) +_tmp_144_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23654,7 +23888,7 @@ _tmp_142_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -23663,7 +23897,7 @@ _tmp_142_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23673,7 +23907,7 @@ _tmp_142_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -23682,9 +23916,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: 'and' inversion +// _tmp_145: 'and' inversion static void * -_tmp_143_rule(Parser *p) +_tmp_145_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23698,7 +23932,7 @@ _tmp_143_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -23707,7 +23941,7 @@ _tmp_143_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23717,7 +23951,7 @@ _tmp_143_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -23726,9 +23960,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: 'if' disjunction +// _tmp_146: 'if' disjunction static void * -_tmp_144_rule(Parser *p) +_tmp_146_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23742,7 +23976,7 @@ _tmp_144_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23751,7 +23985,7 @@ _tmp_144_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23761,7 +23995,7 @@ _tmp_144_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -23770,9 +24004,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: 'if' disjunction +// _tmp_147: 'if' disjunction static void * -_tmp_145_rule(Parser *p) +_tmp_147_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23786,7 +24020,7 @@ _tmp_145_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23795,7 +24029,7 @@ _tmp_145_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23805,7 +24039,7 @@ _tmp_145_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -23814,9 +24048,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: ',' star_target +// _tmp_148: ',' star_target static void * -_tmp_146_rule(Parser *p) +_tmp_148_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23830,7 +24064,7 @@ _tmp_146_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -23839,7 +24073,7 @@ _tmp_146_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23849,7 +24083,7 @@ _tmp_146_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -23858,9 +24092,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: star_targets '=' +// _tmp_149: star_targets '=' static void * -_tmp_147_rule(Parser *p) +_tmp_149_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23874,7 +24108,7 @@ _tmp_147_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -23883,12 +24117,12 @@ _tmp_147_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23897,9 +24131,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: star_targets '=' +// _tmp_150: star_targets '=' static void * -_tmp_148_rule(Parser *p) +_tmp_150_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23913,7 +24147,7 @@ _tmp_148_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -23922,12 +24156,12 @@ _tmp_148_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23936,9 +24170,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _loop1_149: param_with_default +// _loop1_151: param_with_default static asdl_seq * -_loop1_149_rule(Parser *p) +_loop1_151_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23962,7 +24196,7 @@ _loop1_149_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -23984,7 +24218,7 @@ _loop1_149_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -24002,14 +24236,85 @@ _loop1_149_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_149_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_151_type, _seq); D(p->level--); return _seq; } -// _tmp_150: ')' | '**' +// _loop1_152: lambda_param_with_default +static asdl_seq * +_loop1_152_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_153: ')' | '**' static void * -_tmp_150_rule(Parser *p) +_tmp_153_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24023,18 +24328,18 @@ _tmp_150_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); } { // '**' @@ -24042,18 +24347,18 @@ _tmp_150_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -24062,9 +24367,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: ':' | '**' +// _tmp_154: ':' | '**' static void * -_tmp_151_rule(Parser *p) +_tmp_154_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24078,18 +24383,18 @@ _tmp_151_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -24097,18 +24402,18 @@ _tmp_151_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; From webhook-mailer at python.org Wed Jun 10 11:39:20 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Wed, 10 Jun 2020 15:39:20 -0000 Subject: [Python-checkins] bpo-36543: Revert "bpo-36543: Remove the xml.etree.cElementTree module." (GH-20117) Message-ID: https://github.com/python/cpython/commit/ec88e1bca81a167e6d5c0ac635e22f84298cb1df commit: ec88e1bca81a167e6d5c0ac635e22f84298cb1df branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-10T17:39:12+02:00 summary: bpo-36543: Revert "bpo-36543: Remove the xml.etree.cElementTree module." (GH-20117) * Revert "bpo-36543: Remove the xml.etree.cElementTree module. (GH-19108)" This reverts commit b33e52511a59c6da7132c226b7f7489b092a33eb. 
files: A Lib/xml/etree/cElementTree.py A Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst M Doc/library/xml.etree.elementtree.rst M Doc/whatsnew/3.9.rst M Lib/test/test_xml_etree_c.py diff --git a/Doc/library/xml.etree.elementtree.rst b/Doc/library/xml.etree.elementtree.rst index 2085a85927e46..7725e4d158d42 100644 --- a/Doc/library/xml.etree.elementtree.rst +++ b/Doc/library/xml.etree.elementtree.rst @@ -15,6 +15,8 @@ for parsing and creating XML data. .. versionchanged:: 3.3 This module will use a fast implementation whenever available. + +.. deprecated:: 3.3 The :mod:`xml.etree.cElementTree` module is deprecated. diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index b20cd14565ae1..67a83bc958457 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -826,11 +826,6 @@ Removed module have been removed. They were deprecated in Python 3.2. Use ``iter(x)`` or ``list(x)`` instead of ``x.getchildren()`` and ``x.iter()`` or ``list(x.iter())`` instead of ``x.getiterator()``. - The ``xml.etree.cElementTree`` module has been removed, - use the :mod:`xml.etree.ElementTree` module instead. - Since Python 3.3 the ``xml.etree.cElementTree`` module has been deprecated, - the ``xml.etree.ElementTree`` module uses a fast implementation whenever - available. (Contributed by Serhiy Storchaka in :issue:`36543`.) * The old :mod:`plistlib` API has been removed, it was deprecated since Python diff --git a/Lib/test/test_xml_etree_c.py b/Lib/test/test_xml_etree_c.py index 7437e13d0611c..e26e1714a540b 100644 --- a/Lib/test/test_xml_etree_c.py +++ b/Lib/test/test_xml_etree_c.py @@ -8,6 +8,9 @@ cET = import_fresh_module('xml.etree.ElementTree', fresh=['_elementtree']) +cET_alias = import_fresh_module('xml.etree.cElementTree', + fresh=['_elementtree', 'xml.etree'], + deprecated=True) @unittest.skipUnless(cET, 'requires _elementtree') @@ -167,6 +170,14 @@ def test_xmlpullparser_leaks(self): support.gc_collect() + at unittest.skipUnless(cET, 'requires _elementtree') +class TestAliasWorking(unittest.TestCase): + # Test that the cET alias module is alive + def test_alias_working(self): + e = cET_alias.Element('foo') + self.assertEqual(e.tag, 'foo') + + @unittest.skipUnless(cET, 'requires _elementtree') @support.cpython_only class TestAcceleratorImported(unittest.TestCase): @@ -175,6 +186,9 @@ def test_correct_import_cET(self): # SubElement is a function so it retains _elementtree as its module. 
self.assertEqual(cET.SubElement.__module__, '_elementtree') + def test_correct_import_cET_alias(self): + self.assertEqual(cET_alias.SubElement.__module__, '_elementtree') + def test_parser_comes_from_C(self): # The type of methods defined in Python code is types.FunctionType, # while the type of methods defined inside _elementtree is @@ -214,6 +228,7 @@ def test_main(): # Run the tests specific to the C implementation support.run_unittest( MiscTests, + TestAliasWorking, TestAcceleratorImported, SizeofTest, ) diff --git a/Lib/xml/etree/cElementTree.py b/Lib/xml/etree/cElementTree.py new file mode 100644 index 0000000000000..368e679189582 --- /dev/null +++ b/Lib/xml/etree/cElementTree.py @@ -0,0 +1,3 @@ +# Deprecated alias for xml.etree.ElementTree + +from xml.etree.ElementTree import * diff --git a/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst new file mode 100644 index 0000000000000..468c1ac9eee17 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-15-21-14-45.bpo-36543.Jt-eSX.rst @@ -0,0 +1 @@ +Restored the deprecated :mod:`xml.etree.cElementTree` module. From webhook-mailer at python.org Wed Jun 10 12:09:09 2020 From: webhook-mailer at python.org (scoder) Date: Wed, 10 Jun 2020 16:09:09 -0000 Subject: [Python-checkins] bpo-40703: Let PyType_FromSpec() set "type.__module__" only if it is not set yet. (GH-20273) Message-ID: https://github.com/python/cpython/commit/24b8bad6d30ae4fb37ee686a073adfa5308659f9 commit: 24b8bad6d30ae4fb37ee686a073adfa5308659f9 branch: master author: scoder committer: GitHub date: 2020-06-10T18:09:01+02:00 summary: bpo-40703: Let PyType_FromSpec() set "type.__module__" only if it is not set yet. (GH-20273) files: A Misc/NEWS.d/next/C API/2020-05-20-19-11-12.bpo-40703.qQXfW8.rst M Objects/typeobject.c diff --git a/Misc/NEWS.d/next/C API/2020-05-20-19-11-12.bpo-40703.qQXfW8.rst b/Misc/NEWS.d/next/C API/2020-05-20-19-11-12.bpo-40703.qQXfW8.rst new file mode 100644 index 0000000000000..5385a2d8dce45 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-05-20-19-11-12.bpo-40703.qQXfW8.rst @@ -0,0 +1,2 @@ +The PyType_FromSpec*() functions no longer overwrite the type's "__module__" attribute +if it is set via "Py_tp_members" or "Py_tp_getset". 
diff --git a/Objects/typeobject.c b/Objects/typeobject.c index 1d556e96be5f2..c8f0d2ee45f3c 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -3067,23 +3067,28 @@ PyType_FromModuleAndSpec(PyObject *module, PyType_Spec *spec, PyObject *bases) } /* Set type.__module__ */ - s = strrchr(spec->name, '.'); - if (s != NULL) { - int err; - modname = PyUnicode_FromStringAndSize( - spec->name, (Py_ssize_t)(s - spec->name)); - if (modname == NULL) { + if (_PyDict_GetItemIdWithError(type->tp_dict, &PyId___module__) == NULL) { + if (PyErr_Occurred()) { goto fail; } - err = _PyDict_SetItemId(type->tp_dict, &PyId___module__, modname); - Py_DECREF(modname); - if (err != 0) - goto fail; - } else { - if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, - "builtin type %.200s has no __module__ attribute", - spec->name)) - goto fail; + s = strrchr(spec->name, '.'); + if (s != NULL) { + int err; + modname = PyUnicode_FromStringAndSize( + spec->name, (Py_ssize_t)(s - spec->name)); + if (modname == NULL) { + goto fail; + } + err = _PyDict_SetItemId(type->tp_dict, &PyId___module__, modname); + Py_DECREF(modname); + if (err != 0) + goto fail; + } else { + if (PyErr_WarnFormat(PyExc_DeprecationWarning, 1, + "builtin type %.200s has no __module__ attribute", + spec->name)) + goto fail; + } } return (PyObject*)res; From webhook-mailer at python.org Wed Jun 10 12:38:14 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 16:38:14 -0000 Subject: [Python-checkins] bpo-40943: Replace PY_FORMAT_SIZE_T with "z" (GH-20781) Message-ID: https://github.com/python/cpython/commit/d36cf5f1d20ce9f111a8fc997104785086e8eee6 commit: d36cf5f1d20ce9f111a8fc997104785086e8eee6 branch: master author: Victor Stinner committer: GitHub date: 2020-06-10T18:38:05+02:00 summary: bpo-40943: Replace PY_FORMAT_SIZE_T with "z" (GH-20781) The PEP 353, written in 2005, introduced PY_FORMAT_SIZE_T. Python no longer supports macOS 10.4 and Visual Studio 2010, but requires more recent macOS and Visual Studio versions. In 2020 with Python 3.10, it is now safe to use directly "%zu" to format size_t and "%zi" to format Py_ssize_t. files: M Include/pyport.h M Modules/_pickle.c M Modules/_sre.c M Modules/gcmodule.c M Modules/sre_lib.h M Objects/bytesobject.c M Objects/object.c M Objects/obmalloc.c M Objects/unicodeobject.c M Python/getargs.c M Python/pyhash.c diff --git a/Include/pyport.h b/Include/pyport.h index bdbd0c942f682..3c71f30bce16f 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -131,7 +131,9 @@ typedef int Py_ssize_clean_t; /* Smallest negative value of type Py_ssize_t. */ #define PY_SSIZE_T_MIN (-PY_SSIZE_T_MAX-1) -/* PY_FORMAT_SIZE_T is a platform-specific modifier for use in a printf +/* Macro kept for backward compatibility: use "z" in new code. + * + * PY_FORMAT_SIZE_T is a platform-specific modifier for use in a printf * format to convert an argument with the width of a size_t or Py_ssize_t. * C99 introduced "z" for this purpose, but old MSVCs had not supported it. 
* Since MSVC supports "z" since (at least) 2015, we can just use "z" diff --git a/Modules/_pickle.c b/Modules/_pickle.c index 5539e64025a39..25e888db19c23 100644 --- a/Modules/_pickle.c +++ b/Modules/_pickle.c @@ -1715,7 +1715,7 @@ memo_get(PicklerObject *self, PyObject *key) if (!self->bin) { pdata[0] = GET; PyOS_snprintf(pdata + 1, sizeof(pdata) - 1, - "%" PY_FORMAT_SIZE_T "d\n", *value); + "%zd\n", *value); len = strlen(pdata); } else { @@ -1772,7 +1772,7 @@ memo_put(PicklerObject *self, PyObject *obj) else if (!self->bin) { pdata[0] = PUT; PyOS_snprintf(pdata + 1, sizeof(pdata) - 1, - "%" PY_FORMAT_SIZE_T "d\n", idx); + "%zd\n", idx); len = strlen(pdata); } else { diff --git a/Modules/_sre.c b/Modules/_sre.c index bdc427822d7e1..70bd8baa01e20 100644 --- a/Modules/_sre.c +++ b/Modules/_sre.c @@ -211,7 +211,7 @@ data_stack_grow(SRE_STATE* state, Py_ssize_t size) if (cursize < minsize) { void* stack; cursize = minsize+minsize/4+1024; - TRACE(("allocate/grow stack %" PY_FORMAT_SIZE_T "d\n", cursize)); + TRACE(("allocate/grow stack %zd\n", cursize)); stack = PyMem_REALLOC(state->data_stack, cursize); if (!stack) { data_stack_dealloc(state); diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index f68258d7a327c..b3bcc8aa4263e 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1043,7 +1043,7 @@ show_stats_each_generations(GCState *gcstate) for (int i = 0; i < NUM_GENERATIONS && pos < sizeof(buf); i++) { pos += PyOS_snprintf(buf+pos, sizeof(buf)-pos, - " %"PY_FORMAT_SIZE_T"d", + " %zd", gc_list_size(GEN_HEAD(gcstate, i))); } @@ -1290,8 +1290,7 @@ collect(PyThreadState *tstate, int generation, if (gcstate->debug & DEBUG_STATS) { double d = _PyTime_AsSecondsDouble(_PyTime_GetMonotonicClock() - t1); PySys_WriteStderr( - "gc: done, %" PY_FORMAT_SIZE_T "d unreachable, " - "%" PY_FORMAT_SIZE_T "d uncollectable, %.4fs elapsed\n", + "gc: done, %zd unreachable, %zd uncollectable, %.4fs elapsed\n", n+m, n, d); } diff --git a/Modules/sre_lib.h b/Modules/sre_lib.h index 2657d8d82c6f1..cfe0a4af2c483 100644 --- a/Modules/sre_lib.h +++ b/Modules/sre_lib.h @@ -323,12 +323,12 @@ SRE(count)(SRE_STATE* state, const SRE_CODE* pattern, Py_ssize_t maxcount) if (!i) break; } - TRACE(("|%p|%p|COUNT %" PY_FORMAT_SIZE_T "d\n", pattern, ptr, + TRACE(("|%p|%p|COUNT %zd\n", pattern, ptr, (SRE_CHAR*) state->ptr - ptr)); return (SRE_CHAR*) state->ptr - ptr; } - TRACE(("|%p|%p|COUNT %" PY_FORMAT_SIZE_T "d\n", pattern, ptr, + TRACE(("|%p|%p|COUNT %zd\n", pattern, ptr, ptr - (SRE_CHAR*) state->ptr)); return ptr - (SRE_CHAR*) state->ptr; } @@ -414,8 +414,7 @@ SRE(info)(SRE_STATE* state, const SRE_CODE* pattern) #define DATA_STACK_ALLOC(state, type, ptr) \ do { \ alloc_pos = state->data_stack_base; \ - TRACE(("allocating %s in %" PY_FORMAT_SIZE_T "d " \ - "(%" PY_FORMAT_SIZE_T "d)\n", \ + TRACE(("allocating %s in %zd (%zd)\n", \ Py_STRINGIFY(type), alloc_pos, sizeof(type))); \ if (sizeof(type) > state->data_stack_size - alloc_pos) { \ int j = data_stack_grow(state, sizeof(type)); \ @@ -429,14 +428,13 @@ do { \ #define DATA_STACK_LOOKUP_AT(state, type, ptr, pos) \ do { \ - TRACE(("looking up %s at %" PY_FORMAT_SIZE_T "d\n", Py_STRINGIFY(type), pos)); \ + TRACE(("looking up %s at %zd\n", Py_STRINGIFY(type), pos)); \ ptr = (type*)(state->data_stack+pos); \ } while (0) #define DATA_STACK_PUSH(state, data, size) \ do { \ - TRACE(("copy data in %p to %" PY_FORMAT_SIZE_T "d " \ - "(%" PY_FORMAT_SIZE_T "d)\n", \ + TRACE(("copy data in %p to %zd (%zd)\n", \ data, state->data_stack_base, size)); \ if (size > 
state->data_stack_size - state->data_stack_base) { \ int j = data_stack_grow(state, size); \ @@ -453,8 +451,7 @@ do { \ safely casted to `void*`, see bpo-39943 for details. */ #define DATA_STACK_POP(state, data, size, discard) \ do { \ - TRACE(("copy data to %p from %" PY_FORMAT_SIZE_T "d " \ - "(%" PY_FORMAT_SIZE_T "d)\n", \ + TRACE(("copy data to %p from %zd (%zd)\n", \ data, state->data_stack_base-size, size)); \ memcpy((void*) data, state->data_stack+state->data_stack_base-size, size); \ if (discard) \ @@ -463,8 +460,7 @@ do { \ #define DATA_STACK_POP_DISCARD(state, size) \ do { \ - TRACE(("discard data from %" PY_FORMAT_SIZE_T "d " \ - "(%" PY_FORMAT_SIZE_T "d)\n", \ + TRACE(("discard data from %zd (%zd)\n", \ state->data_stack_base-size, size)); \ state->data_stack_base -= size; \ } while(0) @@ -577,8 +573,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) /* optimization info block */ /* <1=skip> <2=flags> <3=min> ... */ if (ctx->pattern[3] && (uintptr_t)(end - ctx->ptr) < ctx->pattern[3]) { - TRACE(("reject (got %" PY_FORMAT_SIZE_T "d chars, " - "need %" PY_FORMAT_SIZE_T "d)\n", + TRACE(("reject (got %zd chars, need %zd)\n", end - ctx->ptr, (Py_ssize_t) ctx->pattern[3])); RETURN_FAILURE; } @@ -1028,7 +1023,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) ctx->count = ctx->u.rep->count+1; - TRACE(("|%p|%p|MAX_UNTIL %" PY_FORMAT_SIZE_T "d\n", ctx->pattern, + TRACE(("|%p|%p|MAX_UNTIL %zd\n", ctx->pattern, ctx->ptr, ctx->count)); if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { @@ -1091,7 +1086,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) ctx->count = ctx->u.rep->count+1; - TRACE(("|%p|%p|MIN_UNTIL %" PY_FORMAT_SIZE_T "d %p\n", ctx->pattern, + TRACE(("|%p|%p|MIN_UNTIL %zd %p\n", ctx->pattern, ctx->ptr, ctx->count, ctx->u.rep->pattern)); if (ctx->count < (Py_ssize_t) ctx->u.rep->pattern[1]) { @@ -1358,7 +1353,7 @@ SRE(match)(SRE_STATE* state, const SRE_CODE* pattern, int toplevel) TRACE(("|%p|%p|JUMP_ASSERT_NOT\n", ctx->pattern, ctx->ptr)); goto jump_assert_not; case JUMP_NONE: - TRACE(("|%p|%p|RETURN %" PY_FORMAT_SIZE_T "d\n", ctx->pattern, + TRACE(("|%p|%p|RETURN %zd\n", ctx->pattern, ctx->ptr, ret)); break; } @@ -1420,7 +1415,7 @@ SRE(search)(SRE_STATE* state, SRE_CODE* pattern) pattern += 1 + pattern[1]; } - TRACE(("prefix = %p %" PY_FORMAT_SIZE_T "d %" PY_FORMAT_SIZE_T "d\n", + TRACE(("prefix = %p %zd %zd\n", prefix, prefix_len, prefix_skip)); TRACE(("charset = %p\n", charset)); diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 8d6454059ef88..b79c2460409eb 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -256,27 +256,29 @@ PyBytes_FromFormatV(const char *format, va_list vargs) } case 'd': - if (longflag) + if (longflag) { sprintf(buffer, "%ld", va_arg(vargs, long)); - else if (size_tflag) - sprintf(buffer, "%" PY_FORMAT_SIZE_T "d", - va_arg(vargs, Py_ssize_t)); - else + } + else if (size_tflag) { + sprintf(buffer, "%zd", va_arg(vargs, Py_ssize_t)); + } + else { sprintf(buffer, "%d", va_arg(vargs, int)); + } assert(strlen(buffer) < sizeof(buffer)); WRITE_BYTES(buffer); break; case 'u': - if (longflag) - sprintf(buffer, "%lu", - va_arg(vargs, unsigned long)); - else if (size_tflag) - sprintf(buffer, "%" PY_FORMAT_SIZE_T "u", - va_arg(vargs, size_t)); - else - sprintf(buffer, "%u", - va_arg(vargs, unsigned int)); + if (longflag) { + sprintf(buffer, "%lu", va_arg(vargs, unsigned long)); + } + else if (size_tflag) { + sprintf(buffer, "%zu", va_arg(vargs, size_t)); + } + else 
{ + sprintf(buffer, "%u", va_arg(vargs, unsigned int)); + } assert(strlen(buffer) < sizeof(buffer)); WRITE_BYTES(buffer); break; diff --git a/Objects/object.c b/Objects/object.c index 623ee52eb1e22..10cbd1b7c16f5 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -66,8 +66,7 @@ _Py_GetRefTotal(void) void _PyDebug_PrintTotalRefs(void) { fprintf(stderr, - "[%" PY_FORMAT_SIZE_T "d refs, " - "%" PY_FORMAT_SIZE_T "d blocks]\n", + "[%zd refs, %zd blocks]\n", _Py_GetRefTotal(), _Py_GetAllocatedBlocks()); } #endif /* Py_REF_DEBUG */ @@ -1876,9 +1875,10 @@ _Py_PrintReferences(FILE *fp) PyObject *op; fprintf(fp, "Remaining objects:\n"); for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) { - fprintf(fp, "%p [%" PY_FORMAT_SIZE_T "d] ", (void *)op, Py_REFCNT(op)); - if (PyObject_Print(op, fp, 0) != 0) + fprintf(fp, "%p [%zd] ", (void *)op, Py_REFCNT(op)); + if (PyObject_Print(op, fp, 0) != 0) { PyErr_Clear(); + } putc('\n', fp); } } @@ -1892,7 +1892,7 @@ _Py_PrintReferenceAddresses(FILE *fp) PyObject *op; fprintf(fp, "Remaining object addresses:\n"); for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) - fprintf(fp, "%p [%" PY_FORMAT_SIZE_T "d] %s\n", (void *)op, + fprintf(fp, "%p [%zd] %s\n", (void *)op, Py_REFCNT(op), Py_TYPE(op)->tp_name); } diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c index eb34f10bddf99..03d0e8e51264c 100644 --- a/Objects/obmalloc.c +++ b/Objects/obmalloc.c @@ -2420,8 +2420,7 @@ _PyObject_DebugDumpAddress(const void *p) fprintf(stderr, " API '%c'\n", id); nbytes = read_size_t(q - 2*SST); - fprintf(stderr, " %" PY_FORMAT_SIZE_T "u bytes originally " - "requested\n", nbytes); + fprintf(stderr, " %zu bytes originally requested\n", nbytes); /* In case this is nuts, check the leading pad bytes first. */ fprintf(stderr, " The %d pad bytes at p-%d are ", SST-1, SST-1); @@ -2477,8 +2476,9 @@ _PyObject_DebugDumpAddress(const void *p) #ifdef PYMEM_DEBUG_SERIALNO size_t serial = read_size_t(tail + SST); - fprintf(stderr, " The block was made by call #%" PY_FORMAT_SIZE_T - "u to debug malloc/realloc.\n", serial); + fprintf(stderr, + " The block was made by call #%zu to debug malloc/realloc.\n", + serial); #endif if (nbytes > 0) { @@ -2553,7 +2553,7 @@ _PyDebugAllocatorStats(FILE *out, char buf1[128]; char buf2[128]; PyOS_snprintf(buf1, sizeof(buf1), - "%d %ss * %" PY_FORMAT_SIZE_T "d bytes each", + "%d %ss * %zd bytes each", num_blocks, block_name, sizeof_block); PyOS_snprintf(buf2, sizeof(buf2), "%48s ", buf1); @@ -2694,10 +2694,7 @@ _PyObject_DebugMallocStats(FILE *out) assert(b == 0 && f == 0); continue; } - fprintf(out, "%5u %6u " - "%11" PY_FORMAT_SIZE_T "u " - "%15" PY_FORMAT_SIZE_T "u " - "%13" PY_FORMAT_SIZE_T "u\n", + fprintf(out, "%5u %6u %11zu %15zu %13zu\n", i, size, p, b, f); allocated_bytes += b * size; available_bytes += f * size; @@ -2716,8 +2713,8 @@ _PyObject_DebugMallocStats(FILE *out) (void)printone(out, "# arenas allocated current", narenas); PyOS_snprintf(buf, sizeof(buf), - "%" PY_FORMAT_SIZE_T "u arenas * %d bytes/arena", - narenas, ARENA_SIZE); + "%zu arenas * %d bytes/arena", + narenas, ARENA_SIZE); (void)printone(out, buf, narenas * ARENA_SIZE); fputc('\n', out); diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index df10888949aba..7ab0c882db049 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1356,19 +1356,18 @@ _PyUnicode_Dump(PyObject *op) } else data = unicode->data.any; - printf("%s: len=%" PY_FORMAT_SIZE_T "u, ", - unicode_kind_name(op), ascii->length); + printf("%s: len=%zu, ", 
unicode_kind_name(op), ascii->length); if (ascii->wstr == data) printf("shared "); printf("wstr=%p", (void *)ascii->wstr); if (!(ascii->state.ascii == 1 && ascii->state.compact == 1)) { - printf(" (%" PY_FORMAT_SIZE_T "u), ", compact->wstr_length); - if (!ascii->state.compact && compact->utf8 == unicode->data.any) + printf(" (%zu), ", compact->wstr_length); + if (!ascii->state.compact && compact->utf8 == unicode->data.any) { printf("shared "); - printf("utf8=%p (%" PY_FORMAT_SIZE_T "u)", - (void *)compact->utf8, compact->utf8_length); + } + printf("utf8=%p (%zu)", (void *)compact->utf8, compact->utf8_length); } printf(", data=%p\n", data); } @@ -2845,35 +2844,35 @@ unicode_fromformat_arg(_PyUnicodeWriter *writer, Py_ssize_t arglen; if (*f == 'u') { - if (longflag) - len = sprintf(buffer, "%lu", - va_arg(*vargs, unsigned long)); - else if (longlongflag) - len = sprintf(buffer, "%llu", - va_arg(*vargs, unsigned long long)); - else if (size_tflag) - len = sprintf(buffer, "%" PY_FORMAT_SIZE_T "u", - va_arg(*vargs, size_t)); - else - len = sprintf(buffer, "%u", - va_arg(*vargs, unsigned int)); + if (longflag) { + len = sprintf(buffer, "%lu", va_arg(*vargs, unsigned long)); + } + else if (longlongflag) { + len = sprintf(buffer, "%llu", va_arg(*vargs, unsigned long long)); + } + else if (size_tflag) { + len = sprintf(buffer, "%zu", va_arg(*vargs, size_t)); + } + else { + len = sprintf(buffer, "%u", va_arg(*vargs, unsigned int)); + } } else if (*f == 'x') { len = sprintf(buffer, "%x", va_arg(*vargs, int)); } else { - if (longflag) - len = sprintf(buffer, "%li", - va_arg(*vargs, long)); - else if (longlongflag) - len = sprintf(buffer, "%lli", - va_arg(*vargs, long long)); - else if (size_tflag) - len = sprintf(buffer, "%" PY_FORMAT_SIZE_T "i", - va_arg(*vargs, Py_ssize_t)); - else - len = sprintf(buffer, "%i", - va_arg(*vargs, int)); + if (longflag) { + len = sprintf(buffer, "%li", va_arg(*vargs, long)); + } + else if (longlongflag) { + len = sprintf(buffer, "%lli", va_arg(*vargs, long long)); + } + else if (size_tflag) { + len = sprintf(buffer, "%zi", va_arg(*vargs, Py_ssize_t)); + } + else { + len = sprintf(buffer, "%i", va_arg(*vargs, int)); + } } assert(len >= 0); @@ -15657,8 +15656,7 @@ unicode_release_interned(void) Py_ssize_t n = PyList_GET_SIZE(keys); #ifdef INTERNED_STATS - fprintf(stderr, "releasing %" PY_FORMAT_SIZE_T "d interned strings\n", - n); + fprintf(stderr, "releasing %zd interned strings\n", n); Py_ssize_t immortal_size = 0, mortal_size = 0; #endif @@ -15688,9 +15686,9 @@ unicode_release_interned(void) _PyUnicode_STATE(s).interned = SSTATE_NOT_INTERNED; } #ifdef INTERNED_STATS - fprintf(stderr, "total size of all interned strings: " - "%" PY_FORMAT_SIZE_T "d/%" PY_FORMAT_SIZE_T "d " - "mortal/immortal\n", mortal_size, immortal_size); + fprintf(stderr, + "total size of all interned strings: %zd/%zd mortal/immortal\n", + mortal_size, immortal_size); #endif Py_DECREF(keys); PyDict_Clear(interned); diff --git a/Python/getargs.c b/Python/getargs.c index 524ad917cd08a..d2dba49966d47 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -452,7 +452,7 @@ seterror(Py_ssize_t iarg, const char *msg, int *levels, const char *fname, } if (iarg != 0) { PyOS_snprintf(p, sizeof(buf) - (p - buf), - "argument %" PY_FORMAT_SIZE_T "d", iarg); + "argument %zd", iarg); i = 0; p += strlen(p); while (i < 32 && levels[i] > 0 && (int)(p-buf) < 220) { @@ -540,15 +540,14 @@ converttuple(PyObject *arg, const char **p_format, va_list *p_va, int flags, levels[0] = 0; if (toplevel) { PyOS_snprintf(msgbuf, 
bufsize, - "expected %d argument%s, not %" PY_FORMAT_SIZE_T "d", + "expected %d argument%s, not %zd", n, n == 1 ? "" : "s", len); } else { PyOS_snprintf(msgbuf, bufsize, - "must be sequence of length %d, " - "not %" PY_FORMAT_SIZE_T "d", + "must be sequence of length %d, not %zd", n, len); } return msgbuf; diff --git a/Python/pyhash.c b/Python/pyhash.c index 3843079fbbce1..3b6c34eefd515 100644 --- a/Python/pyhash.c +++ b/Python/pyhash.c @@ -200,18 +200,14 @@ void _PyHash_Fini(void) { #ifdef Py_HASH_STATS - int i; - Py_ssize_t total = 0; - const char *fmt = "%2i %8" PY_FORMAT_SIZE_T "d %8" PY_FORMAT_SIZE_T "d\n"; - fprintf(stderr, "len calls total\n"); - for (i = 1; i <= Py_HASH_STATS_MAX; i++) { + Py_ssize_t total = 0; + for (int i = 1; i <= Py_HASH_STATS_MAX; i++) { total += hashstats[i]; - fprintf(stderr, fmt, i, hashstats[i], total); + fprintf(stderr, "%2i %8zd %8zd\n", i, hashstats[i], total); } total += hashstats[0]; - fprintf(stderr, "> %8" PY_FORMAT_SIZE_T "d %8" PY_FORMAT_SIZE_T "d\n", - hashstats[0], total); + fprintf(stderr, "> %8zd %8zd\n", hashstats[0], total); #endif } From webhook-mailer at python.org Wed Jun 10 12:49:30 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 16:49:30 -0000 Subject: [Python-checkins] bpo-40826: Fix test_repl.test_close_stdin() on Windows (GH-20779) Message-ID: https://github.com/python/cpython/commit/f6e58aefde2e57e4cb11ea7743955da53a3f1e80 commit: f6e58aefde2e57e4cb11ea7743955da53a3f1e80 branch: master author: Victor Stinner committer: GitHub date: 2020-06-10T18:49:23+02:00 summary: bpo-40826: Fix test_repl.test_close_stdin() on Windows (GH-20779) test_repl.test_close_stdin() now calls support.suppress_msvcrt_asserts() to fix the test on Windows. * Move suppress_msvcrt_asserts() from test.libregrtest.setup to test.support. Make its verbose parameter optional: verbose=False by default. * Add msvcrt.GetErrorMode(). * SuppressCrashReport now uses GetErrorMode() and SetErrorMode() of the msvcrt module, rather than using ctypes. * Remove also an unused variable (deadline) in wait_process(). 
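As an illustration only (not part of the commit itself), a test script might use the relocated helper after this change roughly like this; on non-Windows platforms the call is a harmless no-op because the msvcrt import fails:

    from test.support import SuppressCrashReport, suppress_msvcrt_asserts

    # Disable Windows Error Reporting dialogs and CRT assertion pop-ups.
    # The verbose parameter now defaults to False, so no argument is needed.
    suppress_msvcrt_asserts()

    # SuppressCrashReport now saves and restores the error mode through
    # msvcrt.GetErrorMode()/SetErrorMode() instead of going through ctypes.
    with SuppressCrashReport():
        pass  # code that could otherwise pop up a crash report or assert dialog
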
files: M Lib/test/audit-tests.py M Lib/test/libregrtest/setup.py M Lib/test/support/__init__.py M Lib/test/test_repl.py M PC/clinic/msvcrtmodule.c.h M PC/msvcrtmodule.c diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index b90c4b8f75794..a58395b068b39 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -350,9 +350,9 @@ def hook(event, args): if __name__ == "__main__": - from test.libregrtest.setup import suppress_msvcrt_asserts + from test.support import suppress_msvcrt_asserts - suppress_msvcrt_asserts(False) + suppress_msvcrt_asserts() test = sys.argv[1] globals()[test]() diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py index ce8149677e0b9..1f264c1be49fe 100644 --- a/Lib/test/libregrtest/setup.py +++ b/Lib/test/libregrtest/setup.py @@ -69,7 +69,7 @@ def setup_tests(ns): if ns.threshold is not None: gc.set_threshold(ns.threshold) - suppress_msvcrt_asserts(ns.verbose and ns.verbose >= 2) + support.suppress_msvcrt_asserts(ns.verbose and ns.verbose >= 2) support.use_resources = ns.use_resources @@ -93,31 +93,6 @@ def _test_audit_hook(name, args): support.LONG_TIMEOUT = min(support.LONG_TIMEOUT, ns.timeout) -def suppress_msvcrt_asserts(verbose): - try: - import msvcrt - except ImportError: - return - - msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS| - msvcrt.SEM_NOALIGNMENTFAULTEXCEPT| - msvcrt.SEM_NOGPFAULTERRORBOX| - msvcrt.SEM_NOOPENFILEERRORBOX) - try: - msvcrt.CrtSetReportMode - except AttributeError: - # release build - return - - for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]: - if verbose: - msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE) - msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR) - else: - msvcrt.CrtSetReportMode(m, 0) - - - def replace_stdout(): """Set stdout encoder error handler to backslashreplace (as stderr error handler) to avoid UnicodeEncodeError when printing a traceback""" diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 3a5f7b556d767..83b21733de0f1 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1899,6 +1899,27 @@ def test__all__(self): test_case.assertCountEqual(module.__all__, expected) +def suppress_msvcrt_asserts(verbose=False): + try: + import msvcrt + except ImportError: + return + + msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS + | msvcrt.SEM_NOALIGNMENTFAULTEXCEPT + | msvcrt.SEM_NOGPFAULTERRORBOX + | msvcrt.SEM_NOOPENFILEERRORBOX) + + # CrtSetReportMode() is only available in debug build + if hasattr(msvcrt, 'CrtSetReportMode'): + for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]: + if verbose: + msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE) + msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR) + else: + msvcrt.CrtSetReportMode(m, 0) + + class SuppressCrashReport: """Try to prevent a crash report from popping up. @@ -1910,30 +1931,25 @@ class SuppressCrashReport: def __enter__(self): """On Windows, disable Windows Error Reporting dialogs using - SetErrorMode. + SetErrorMode() and CrtSetReportMode(). On UNIX, try to save the previous core file size limit, then set soft limit to 0. 
""" if sys.platform.startswith('win'): # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx - # GetErrorMode is not available on Windows XP and Windows Server 2003, - # but SetErrorMode returns the previous value, so we can use that - import ctypes - self._k32 = ctypes.windll.kernel32 - SEM_NOGPFAULTERRORBOX = 0x02 - self.old_value = self._k32.SetErrorMode(SEM_NOGPFAULTERRORBOX) - self._k32.SetErrorMode(self.old_value | SEM_NOGPFAULTERRORBOX) - - # Suppress assert dialogs in debug builds - # (see http://bugs.python.org/issue23314) try: import msvcrt - msvcrt.CrtSetReportMode - except (AttributeError, ImportError): - # no msvcrt or a release build - pass - else: + except ImportError: + return + + self.old_value = msvcrt.GetErrorMode() + + msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX) + + # bpo-23314: Suppress assert dialogs in debug builds. + # CrtSetReportMode() is only available in debug build. + if hasattr(msvcrt, 'CrtSetReportMode'): self.old_modes = {} for report_type in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, @@ -1985,10 +2001,10 @@ def __exit__(self, *ignore_exc): return if sys.platform.startswith('win'): - self._k32.SetErrorMode(self.old_value) + import msvcrt + msvcrt.SetErrorMode(self.old_value) if self.old_modes: - import msvcrt for report_type, (old_mode, old_file) in self.old_modes.items(): msvcrt.CrtSetReportMode(report_type, old_mode) msvcrt.CrtSetReportFile(report_type, old_file) @@ -2332,7 +2348,6 @@ def wait_process(pid, *, exitcode, timeout=None): if timeout is None: timeout = SHORT_TIMEOUT t0 = time.monotonic() - deadline = t0 + timeout sleep = 0.001 max_sleep = 0.1 while True: diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 563f188706b93..03bf8d8b5483f 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -98,7 +98,11 @@ def test_close_stdin(self): print("before close") os.close(0) ''') - process = spawn_repl() + prepare_repl = dedent(''' + from test.support import suppress_msvcrt_asserts + suppress_msvcrt_asserts() + ''') + process = spawn_repl('-c', prepare_repl) output = process.communicate(user_input)[0] self.assertEqual(process.returncode, 0) self.assertIn('before close', output) diff --git a/PC/clinic/msvcrtmodule.c.h b/PC/clinic/msvcrtmodule.c.h index 9701e8a63be8c..1ac82cb965b64 100644 --- a/PC/clinic/msvcrtmodule.c.h +++ b/PC/clinic/msvcrtmodule.c.h @@ -590,6 +590,24 @@ msvcrt_set_error_mode(PyObject *module, PyObject *arg) #endif /* defined(_DEBUG) */ +PyDoc_STRVAR(msvcrt_GetErrorMode__doc__, +"GetErrorMode($module, /)\n" +"--\n" +"\n" +"Wrapper around GetErrorMode."); + +#define MSVCRT_GETERRORMODE_METHODDEF \ + {"GetErrorMode", (PyCFunction)msvcrt_GetErrorMode, METH_NOARGS, msvcrt_GetErrorMode__doc__}, + +static PyObject * +msvcrt_GetErrorMode_impl(PyObject *module); + +static PyObject * +msvcrt_GetErrorMode(PyObject *module, PyObject *Py_UNUSED(ignored)) +{ + return msvcrt_GetErrorMode_impl(module); +} + PyDoc_STRVAR(msvcrt_SetErrorMode__doc__, "SetErrorMode($module, mode, /)\n" "--\n" @@ -629,4 +647,4 @@ msvcrt_SetErrorMode(PyObject *module, PyObject *arg) #ifndef MSVCRT_SET_ERROR_MODE_METHODDEF #define MSVCRT_SET_ERROR_MODE_METHODDEF #endif /* !defined(MSVCRT_SET_ERROR_MODE_METHODDEF) */ -/*[clinic end generated code: output=ab3b5ce5c1447f0e input=a9049054013a1b77]*/ +/*[clinic end generated code: output=20dfbc768edce7c0 input=a9049054013a1b77]*/ diff --git a/PC/msvcrtmodule.c b/PC/msvcrtmodule.c index faceb03fba39d..b7ff20ab0fb1c 100644 --- a/PC/msvcrtmodule.c +++ b/PC/msvcrtmodule.c @@ 
-482,6 +482,25 @@ msvcrt_set_error_mode_impl(PyObject *module, int mode) } #endif /* _DEBUG */ +/*[clinic input] +msvcrt.GetErrorMode + +Wrapper around GetErrorMode. +[clinic start generated code]*/ + +static PyObject * +msvcrt_GetErrorMode_impl(PyObject *module) +/*[clinic end generated code: output=3103fc6145913591 input=5a7fb083b6dd71fd]*/ +{ + unsigned int res; + + _Py_BEGIN_SUPPRESS_IPH + res = GetErrorMode(); + _Py_END_SUPPRESS_IPH + + return PyLong_FromUnsignedLong(res); +} + /*[clinic input] msvcrt.SetErrorMode @@ -520,6 +539,7 @@ static struct PyMethodDef msvcrt_functions[] = { MSVCRT_GETCHE_METHODDEF MSVCRT_PUTCH_METHODDEF MSVCRT_UNGETCH_METHODDEF + MSVCRT_GETERRORMODE_METHODDEF MSVCRT_SETERRORMODE_METHODDEF MSVCRT_CRTSETREPORTFILE_METHODDEF MSVCRT_CRTSETREPORTMODE_METHODDEF From webhook-mailer at python.org Wed Jun 10 13:33:20 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 17:33:20 -0000 Subject: [Python-checkins] _PyPreConfig_Read() decodes argv at each iteration (GH-20786) Message-ID: https://github.com/python/cpython/commit/8eb4aea26297daf105108c4866d4c692bd8b81f9 commit: 8eb4aea26297daf105108c4866d4c692bd8b81f9 branch: master author: Victor Stinner committer: GitHub date: 2020-06-10T19:33:11+02:00 summary: _PyPreConfig_Read() decodes argv at each iteration (GH-20786) _PyPreConfig_Read() now calls _PyPreCmdline_SetArgv() at each iteration, so bytes strings are decoded from the new encoding. files: M Python/preconfig.c diff --git a/Python/preconfig.c b/Python/preconfig.c index fd94d7dda1c29..149afcd99ab32 100644 --- a/Python/preconfig.c +++ b/Python/preconfig.c @@ -829,13 +829,6 @@ _PyPreConfig_Read(PyPreConfig *config, const _PyArgv *args) int init_legacy_encoding = Py_LegacyWindowsFSEncodingFlag; #endif - if (args) { - status = _PyPreCmdline_SetArgv(&cmdline, args); - if (_PyStatus_EXCEPTION(status)) { - goto done; - } - } - int locale_coerced = 0; int loops = 0; @@ -846,7 +839,7 @@ _PyPreConfig_Read(PyPreConfig *config, const _PyArgv *args) loops++; if (loops == 3) { status = _PyStatus_ERR("Encoding changed twice while " - "reading the configuration"); + "reading the configuration"); goto done; } @@ -857,6 +850,15 @@ _PyPreConfig_Read(PyPreConfig *config, const _PyArgv *args) Py_LegacyWindowsFSEncodingFlag = config->legacy_windows_fs_encoding; #endif + if (args) { + // Set command line arguments at each iteration. If they are bytes + // strings, they are decoded from the new encoding. + status = _PyPreCmdline_SetArgv(&cmdline, args); + if (_PyStatus_EXCEPTION(status)) { + goto done; + } + } + status = preconfig_read(config, &cmdline); if (_PyStatus_EXCEPTION(status)) { goto done; @@ -896,7 +898,7 @@ _PyPreConfig_Read(PyPreConfig *config, const _PyArgv *args) } /* Reset the configuration before reading again the configuration, - just keep UTF-8 Mode value. */ + just keep UTF-8 Mode and coerce C locale value. 
*/ int new_utf8_mode = config->utf8_mode; int new_coerce_c_locale = config->coerce_c_locale; preconfig_copy(config, &save_config); From webhook-mailer at python.org Wed Jun 10 13:45:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 17:45:55 -0000 Subject: [Python-checkins] bpo-40826: Fix test_repl.test_close_stdin() on Windows (GH-20779) (GH-20785) (GH-20787) Message-ID: https://github.com/python/cpython/commit/c7a6c7b5279f766e65b7cc9dc5bebb73acee6672 commit: c7a6c7b5279f766e65b7cc9dc5bebb73acee6672 branch: 3.8 author: Victor Stinner committer: GitHub date: 2020-06-10T19:45:47+02:00 summary: bpo-40826: Fix test_repl.test_close_stdin() on Windows (GH-20779) (GH-20785) (GH-20787) test_repl.test_close_stdin() now calls support.suppress_msvcrt_asserts() to fix the test on Windows. * Move suppress_msvcrt_asserts() from test.libregrtest.setup to test.support. Make its verbose parameter optional: verbose=False by default. * SuppressCrashReport now uses SetErrorMode() of the msvcrt module, rather than using ctypes. * Remove also an unused variable (deadline) in wait_process(). (cherry picked from commit f6e58aefde2e57e4cb11ea7743955da53a3f1e80) (cherry picked from commit 4a4f660cfde8b683634c53e6214a6baa51de43b1) files: M Lib/test/audit-tests.py M Lib/test/libregrtest/setup.py M Lib/test/support/__init__.py M Lib/test/test_repl.py diff --git a/Lib/test/audit-tests.py b/Lib/test/audit-tests.py index b90c4b8f75794..a58395b068b39 100644 --- a/Lib/test/audit-tests.py +++ b/Lib/test/audit-tests.py @@ -350,9 +350,9 @@ def hook(event, args): if __name__ == "__main__": - from test.libregrtest.setup import suppress_msvcrt_asserts + from test.support import suppress_msvcrt_asserts - suppress_msvcrt_asserts(False) + suppress_msvcrt_asserts() test = sys.argv[1] globals()[test]() diff --git a/Lib/test/libregrtest/setup.py b/Lib/test/libregrtest/setup.py index 1e0eac3b0fbbd..2b0bdf9997d95 100644 --- a/Lib/test/libregrtest/setup.py +++ b/Lib/test/libregrtest/setup.py @@ -67,7 +67,7 @@ def setup_tests(ns): if ns.threshold is not None: gc.set_threshold(ns.threshold) - suppress_msvcrt_asserts(ns.verbose and ns.verbose >= 2) + support.suppress_msvcrt_asserts(ns.verbose and ns.verbose >= 2) support.use_resources = ns.use_resources @@ -78,31 +78,6 @@ def _test_audit_hook(name, args): sys.addaudithook(_test_audit_hook) -def suppress_msvcrt_asserts(verbose): - try: - import msvcrt - except ImportError: - return - - msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS| - msvcrt.SEM_NOALIGNMENTFAULTEXCEPT| - msvcrt.SEM_NOGPFAULTERRORBOX| - msvcrt.SEM_NOOPENFILEERRORBOX) - try: - msvcrt.CrtSetReportMode - except AttributeError: - # release build - return - - for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]: - if verbose: - msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE) - msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR) - else: - msvcrt.CrtSetReportMode(m, 0) - - - def replace_stdout(): """Set stdout encoder error handler to backslashreplace (as stderr error handler) to avoid UnicodeEncodeError when printing a traceback""" diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 400eebc521454..08d53ec7064b4 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -2825,6 +2825,27 @@ def test__all__(self): test_case.assertCountEqual(module.__all__, expected) +def suppress_msvcrt_asserts(verbose=False): + try: + import msvcrt + except ImportError: + return + + msvcrt.SetErrorMode(msvcrt.SEM_FAILCRITICALERRORS + | 
msvcrt.SEM_NOALIGNMENTFAULTEXCEPT + | msvcrt.SEM_NOGPFAULTERRORBOX + | msvcrt.SEM_NOOPENFILEERRORBOX) + + # CrtSetReportMode() is only available in debug build + if hasattr(msvcrt, 'CrtSetReportMode'): + for m in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, msvcrt.CRT_ASSERT]: + if verbose: + msvcrt.CrtSetReportMode(m, msvcrt.CRTDBG_MODE_FILE) + msvcrt.CrtSetReportFile(m, msvcrt.CRTDBG_FILE_STDERR) + else: + msvcrt.CrtSetReportMode(m, 0) + + class SuppressCrashReport: """Try to prevent a crash report from popping up. @@ -2836,7 +2857,7 @@ class SuppressCrashReport: def __enter__(self): """On Windows, disable Windows Error Reporting dialogs using - SetErrorMode. + SetErrorMode() and CrtSetReportMode(). On UNIX, try to save the previous core file size limit, then set soft limit to 0. @@ -2845,21 +2866,18 @@ def __enter__(self): # see http://msdn.microsoft.com/en-us/library/windows/desktop/ms680621.aspx # GetErrorMode is not available on Windows XP and Windows Server 2003, # but SetErrorMode returns the previous value, so we can use that - import ctypes - self._k32 = ctypes.windll.kernel32 - SEM_NOGPFAULTERRORBOX = 0x02 - self.old_value = self._k32.SetErrorMode(SEM_NOGPFAULTERRORBOX) - self._k32.SetErrorMode(self.old_value | SEM_NOGPFAULTERRORBOX) - - # Suppress assert dialogs in debug builds - # (see http://bugs.python.org/issue23314) try: import msvcrt - msvcrt.CrtSetReportMode - except (AttributeError, ImportError): - # no msvcrt or a release build - pass - else: + except ImportError: + return + + self.old_value = msvcrt.SetErrorMode(msvcrt.SEM_NOGPFAULTERRORBOX) + + msvcrt.SetErrorMode(self.old_value | msvcrt.SEM_NOGPFAULTERRORBOX) + + # bpo-23314: Suppress assert dialogs in debug builds. + # CrtSetReportMode() is only available in debug build. + if hasattr(msvcrt, 'CrtSetReportMode'): self.old_modes = {} for report_type in [msvcrt.CRT_WARN, msvcrt.CRT_ERROR, @@ -2905,10 +2923,10 @@ def __exit__(self, *ignore_exc): return if sys.platform.startswith('win'): - self._k32.SetErrorMode(self.old_value) + import msvcrt + msvcrt.SetErrorMode(self.old_value) if self.old_modes: - import msvcrt for report_type, (old_mode, old_file) in self.old_modes.items(): msvcrt.CrtSetReportMode(report_type, old_mode) msvcrt.CrtSetReportFile(report_type, old_file) diff --git a/Lib/test/test_repl.py b/Lib/test/test_repl.py index 563f188706b93..03bf8d8b5483f 100644 --- a/Lib/test/test_repl.py +++ b/Lib/test/test_repl.py @@ -98,7 +98,11 @@ def test_close_stdin(self): print("before close") os.close(0) ''') - process = spawn_repl() + prepare_repl = dedent(''' + from test.support import suppress_msvcrt_asserts + suppress_msvcrt_asserts() + ''') + process = spawn_repl('-c', prepare_repl) output = process.communicate(user_input)[0] self.assertEqual(process.returncode, 0) self.assertIn('before close', output) From webhook-mailer at python.org Wed Jun 10 13:54:34 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 17:54:34 -0000 Subject: [Python-checkins] bpo-40927: Fix test_binhex when run twice (GH-20764) Message-ID: https://github.com/python/cpython/commit/9c24e2e4c10705d95258558348417a28007dac66 commit: 9c24e2e4c10705d95258558348417a28007dac66 branch: master author: Victor Stinner committer: GitHub date: 2020-06-10T19:54:29+02:00 summary: bpo-40927: Fix test_binhex when run twice (GH-20764) test_binhex now uses import_fresh_module() to ensure that it raises DeprecationWarning each time. 
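For illustration (a sketch, not the committed test), the reason import_fresh_module() helps here: a plain import of binhex is cached in sys.modules, so a second run in the same process would not emit DeprecationWarning again, whereas import_fresh_module() removes the cached entry before importing:

    from test import support

    # Every run sees the warning, because import_fresh_module() drops
    # 'binhex' from sys.modules before performing the import again.
    with support.check_warnings(('', DeprecationWarning)):
        binhex = support.import_fresh_module('binhex')

(At the time of this commit the helper lives in test.support; the bpo-40275 change later in this digest moves it into test.support.import_helper.)
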
files: A Misc/NEWS.d/next/Tests/2020-06-09-18-48-18.bpo-40927.67ylLg.rst M Lib/test/test_binhex.py diff --git a/Lib/test/test_binhex.py b/Lib/test/test_binhex.py index 86ca37ce1b99a..859553222a3e9 100644 --- a/Lib/test/test_binhex.py +++ b/Lib/test/test_binhex.py @@ -7,7 +7,7 @@ from test import support with support.check_warnings(('', DeprecationWarning)): - import binhex + binhex = support.import_fresh_module('binhex') class BinHexTestCase(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Tests/2020-06-09-18-48-18.bpo-40927.67ylLg.rst b/Misc/NEWS.d/next/Tests/2020-06-09-18-48-18.bpo-40927.67ylLg.rst new file mode 100644 index 0000000000000..66209b84c94d6 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-09-18-48-18.bpo-40927.67ylLg.rst @@ -0,0 +1,2 @@ +Fix test_binhex when run twice: it now uses import_fresh_module() to ensure +that it raises DeprecationWarning each time. From webhook-mailer at python.org Wed Jun 10 14:08:30 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 10 Jun 2020 18:08:30 -0000 Subject: [Python-checkins] bpo-39465: Use _PyInterpreterState_GET() (GH-20788) Message-ID: https://github.com/python/cpython/commit/1bcc32f0620d2e99649a6d423284d9496b7b3548 commit: 1bcc32f0620d2e99649a6d423284d9496b7b3548 branch: master author: Victor Stinner committer: GitHub date: 2020-06-10T20:08:26+02:00 summary: bpo-39465: Use _PyInterpreterState_GET() (GH-20788) Replace _PyThreadState_GET() with _PyInterpreterState_GET() in: * get_small_int() * gcmodule.c: add also get_gc_state() function * _PyTrash_deposit_object() * _PyTrash_destroy_chain() * warnings_get_state() * Py_GetRecursionLimit() Cleanup listnode.c: add 'parser' variable. files: M Include/internal/pycore_interp.h M Modules/gcmodule.c M Objects/longobject.c M Objects/object.c M Parser/listnode.c M Python/_warnings.c M Python/ceval.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 4f811023f7a04..981b73340b7ea 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -13,7 +13,12 @@ extern "C" { #include "pycore_gc.h" /* struct _gc_runtime_state */ #include "pycore_warnings.h" /* struct _warnings_runtime_state */ -/* ceval state */ +struct _Py_parser_state { + struct { + int level; + int atbol; + } listnode; +}; struct _pending_calls { PyThread_type_lock lock; @@ -209,12 +214,7 @@ struct _is { PyObject *audit_hooks; - struct { - struct { - int level; - int atbol; - } listnode; - } parser; + struct _Py_parser_state parser; #if _PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS > 0 /* Small integers are preallocated in this array so that they diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index b3bcc8aa4263e..444db7b03b4a5 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -128,6 +128,15 @@ gc_decref(PyGC_Head *g) #define GEN_HEAD(gcstate, n) (&(gcstate)->generations[n].head) + +static GCState * +get_gc_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->gc; +} + + void _PyGC_InitState(GCState *gcstate) { @@ -1465,8 +1474,7 @@ static PyObject * gc_enable_impl(PyObject *module) /*[clinic end generated code: output=45a427e9dce9155c input=81ac4940ca579707]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); gcstate->enabled = 1; Py_RETURN_NONE; } @@ -1481,8 +1489,7 @@ static PyObject * gc_disable_impl(PyObject *module) /*[clinic end generated code: output=97d1030f7aa9d279 input=8c2e5a14e800d83b]*/ { - PyThreadState *tstate = 
_PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); gcstate->enabled = 0; Py_RETURN_NONE; } @@ -1497,8 +1504,7 @@ static int gc_isenabled_impl(PyObject *module) /*[clinic end generated code: output=1874298331c49130 input=30005e0422373b31]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); return gcstate->enabled; } @@ -1563,8 +1569,7 @@ static PyObject * gc_set_debug_impl(PyObject *module, int flags) /*[clinic end generated code: output=7c8366575486b228 input=5e5ce15e84fbed15]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); gcstate->debug = flags; Py_RETURN_NONE; } @@ -1579,8 +1584,7 @@ static int gc_get_debug_impl(PyObject *module) /*[clinic end generated code: output=91242f3506cd1e50 input=91a101e1c3b98366]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); return gcstate->debug; } @@ -1593,8 +1597,7 @@ PyDoc_STRVAR(gc_set_thresh__doc__, static PyObject * gc_set_threshold(PyObject *self, PyObject *args) { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); if (!PyArg_ParseTuple(args, "i|ii:set_threshold", &gcstate->generations[0].threshold, &gcstate->generations[1].threshold, @@ -1617,8 +1620,7 @@ static PyObject * gc_get_threshold_impl(PyObject *module) /*[clinic end generated code: output=7902bc9f41ecbbd8 input=286d79918034d6e6]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); return Py_BuildValue("(iii)", gcstate->generations[0].threshold, gcstate->generations[1].threshold, @@ -1635,8 +1637,7 @@ static PyObject * gc_get_count_impl(PyObject *module) /*[clinic end generated code: output=354012e67b16398f input=a392794a08251751]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); return Py_BuildValue("(iii)", gcstate->generations[0].count, gcstate->generations[1].count, @@ -1679,15 +1680,13 @@ Return the list of objects that directly refer to any of objs."); static PyObject * gc_get_referrers(PyObject *self, PyObject *args) { - PyThreadState *tstate = _PyThreadState_GET(); - int i; PyObject *result = PyList_New(0); if (!result) { return NULL; } - GCState *gcstate = &tstate->interp->gc; - for (i = 0; i < NUM_GENERATIONS; i++) { + GCState *gcstate = get_gc_state(); + for (int i = 0; i < NUM_GENERATIONS; i++) { if (!(gc_referrers_for(args, GEN_HEAD(gcstate, i), result))) { Py_DECREF(result); return NULL; @@ -1806,11 +1805,10 @@ gc_get_stats_impl(PyObject *module) { int i; struct gc_generation_stats stats[NUM_GENERATIONS], *st; - PyThreadState *tstate = _PyThreadState_GET(); /* To get consistent values despite allocations while constructing the result list, we use a snapshot of the running stats. 
*/ - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); for (i = 0; i < NUM_GENERATIONS; i++) { stats[i] = gcstate->generation_stats[i]; } @@ -1901,8 +1899,7 @@ static PyObject * gc_freeze_impl(PyObject *module) /*[clinic end generated code: output=502159d9cdc4c139 input=b602b16ac5febbe5]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); for (int i = 0; i < NUM_GENERATIONS; ++i) { gc_list_merge(GEN_HEAD(gcstate, i), &gcstate->permanent_generation.head); gcstate->generations[i].count = 0; @@ -1922,8 +1919,7 @@ static PyObject * gc_unfreeze_impl(PyObject *module) /*[clinic end generated code: output=1c15f2043b25e169 input=2dd52b170f4cef6c]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); gc_list_merge(&gcstate->permanent_generation.head, GEN_HEAD(gcstate, NUM_GENERATIONS-1)); Py_RETURN_NONE; @@ -1939,8 +1935,7 @@ static Py_ssize_t gc_get_freeze_count_impl(PyObject *module) /*[clinic end generated code: output=61cbd9f43aa032e1 input=45ffbc65cfe2a6ed]*/ { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); return gc_list_size(&gcstate->permanent_generation.head); } @@ -2006,8 +2001,7 @@ static struct PyModuleDef gcmodule = { PyMODINIT_FUNC PyInit_gc(void) { - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); PyObject *m = PyModule_Create(&gcmodule); @@ -2316,8 +2310,7 @@ PyObject_GC_Del(void *op) if (_PyObject_GC_IS_TRACKED(op)) { gc_list_remove(g); } - PyThreadState *tstate = _PyThreadState_GET(); - GCState *gcstate = &tstate->interp->gc; + GCState *gcstate = get_gc_state(); if (gcstate->generations[0].count > 0) { gcstate->generations[0].count--; } diff --git a/Objects/longobject.c b/Objects/longobject.c index ce10c4f66586a..dead3e306943c 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -41,8 +41,8 @@ static PyObject * get_small_int(sdigit ival) { assert(IS_SMALL_INT(ival)); - PyThreadState *tstate = _PyThreadState_GET(); - PyObject *v = (PyObject*)tstate->interp->small_ints[ival + NSMALLNEGINTS]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + PyObject *v = (PyObject*)interp->small_ints[ival + NSMALLNEGINTS]; Py_INCREF(v); return v; } diff --git a/Objects/object.c b/Objects/object.c index 10cbd1b7c16f5..0ab5de28499a8 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -2029,8 +2029,8 @@ Py_ReprLeave(PyObject *obj) void _PyTrash_deposit_object(PyObject *op) { - PyThreadState *tstate = _PyThreadState_GET(); - struct _gc_runtime_state *gcstate = &tstate->interp->gc; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _gc_runtime_state *gcstate = &interp->gc; _PyObject_ASSERT(op, _PyObject_IS_GC(op)); _PyObject_ASSERT(op, !_PyObject_GC_IS_TRACKED(op)); @@ -2057,8 +2057,8 @@ _PyTrash_thread_deposit_object(PyObject *op) void _PyTrash_destroy_chain(void) { - PyThreadState *tstate = _PyThreadState_GET(); - struct _gc_runtime_state *gcstate = &tstate->interp->gc; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _gc_runtime_state *gcstate = &interp->gc; while (gcstate->trash_delete_later) { PyObject *op = gcstate->trash_delete_later; diff --git a/Parser/listnode.c b/Parser/listnode.c index c806b98e48c35..41e7a033a1fa6 100644 --- a/Parser/listnode.c +++ b/Parser/listnode.c @@ -30,8 +30,6 @@ listnode(FILE 
*fp, node *n) static void list1node(FILE *fp, node *n) { - PyInterpreterState *interp; - if (n == NULL) return; if (ISNONTERMINAL(TYPE(n))) { @@ -40,26 +38,28 @@ list1node(FILE *fp, node *n) list1node(fp, CHILD(n, i)); } else if (ISTERMINAL(TYPE(n))) { - interp = _PyInterpreterState_GET(); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_parser_state *parser = &interp->parser; switch (TYPE(n)) { case INDENT: - interp->parser.listnode.level++; + parser->listnode.level++; break; case DEDENT: - interp->parser.listnode.level--; + parser->listnode.level--; break; default: - if (interp->parser.listnode.atbol) { + if (parser->listnode.atbol) { int i; - for (i = 0; i < interp->parser.listnode.level; ++i) + for (i = 0; i < parser->listnode.level; ++i) { fprintf(fp, "\t"); - interp->parser.listnode.atbol = 0; + } + parser->listnode.atbol = 0; } if (TYPE(n) == NEWLINE) { if (STR(n) != NULL) fprintf(fp, "%s", STR(n)); fprintf(fp, "\n"); - interp->parser.listnode.atbol = 1; + parser->listnode.atbol = 1; } else fprintf(fp, "%s ", STR(n)); diff --git a/Python/_warnings.c b/Python/_warnings.c index 4d65bb30c8e5c..86bbfa1c8db86 100644 --- a/Python/_warnings.c +++ b/Python/_warnings.c @@ -32,14 +32,14 @@ _Py_IDENTIFIER(__name__); static WarningsState * warnings_get_state(void) { - PyThreadState *tstate = _PyThreadState_GET(); - if (tstate == NULL) { - _PyErr_SetString(tstate, PyExc_RuntimeError, - "warnings_get_state: could not identify " - "current interpreter"); + PyInterpreterState *interp = _PyInterpreterState_GET(); + if (interp == NULL) { + PyErr_SetString(PyExc_RuntimeError, + "warnings_get_state: could not identify " + "current interpreter"); return NULL; } - return &tstate->interp->warnings; + return &interp->warnings; } /* Clear the given warnings module state. */ diff --git a/Python/ceval.c b/Python/ceval.c index d1d0779318571..9f2cbb06e606a 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -788,8 +788,8 @@ _PyEval_FiniState(struct _ceval_state *ceval) int Py_GetRecursionLimit(void) { - PyThreadState *tstate = _PyThreadState_GET(); - return tstate->interp->ceval.recursion_limit; + PyInterpreterState *interp = _PyInterpreterState_GET(); + return interp->ceval.recursion_limit; } void From webhook-mailer at python.org Wed Jun 10 16:26:58 2020 From: webhook-mailer at python.org (Daniel Fortunov) Date: Wed, 10 Jun 2020 20:26:58 -0000 Subject: [Python-checkins] bpo-40895: Update weakref documentation to remove old warnings (GH-20687) Message-ID: https://github.com/python/cpython/commit/1642c0ef750f96664a98cadb09301d492098d2fb commit: 1642c0ef750f96664a98cadb09301d492098d2fb branch: master author: Daniel Fortunov committer: GitHub date: 2020-06-10T13:26:49-07:00 summary: bpo-40895: Update weakref documentation to remove old warnings (GH-20687) The doccumentation at https://docs.python.org/3.10/library/weakref.html cautions that the `WeakKeyDictionary` and `WeakValueDictionary` are susceptible to the problem of dictionary mutation during iteration. These notes present the user with a problem that has no easy solution. I dug into the implementation and found that fortunately, Antoine Pitrou already addressed this challenge (10 years ago!) by introducing an `_IterationGuard` context manager to the implementation, which delays mutation while an iteration is in progress. 
I asked for confirmation and @pitrou agreed that these notes could be removed: https://github.com/python/cpython/commit/c1baa601e2b558deb690edfdf334fceee3b03327#commitcomment-39514438 files: M Doc/library/weakref.rst diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 12eb985c34435..d3c3a070f38af 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -163,14 +163,6 @@ Extension types can easily be made to support weak references; see application without adding attributes to those objects. This can be especially useful with objects that override attribute accesses. - .. note:: - - Caution: Because a :class:`WeakKeyDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakKeyDictionary` because actions - performed by the program during iteration may cause items in the - dictionary to vanish "by magic" (as a side effect of garbage collection). - .. versionchanged:: 3.9 Added support for ``|`` and ``|=`` operators, specified in :pep:`584`. @@ -192,14 +184,6 @@ than needed. Mapping class that references values weakly. Entries in the dictionary will be discarded when no strong reference to the value exists any more. - .. note:: - - Caution: Because a :class:`WeakValueDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakValueDictionary` because actions performed - by the program during iteration may cause items in the dictionary to vanish "by - magic" (as a side effect of garbage collection). - .. versionchanged:: 3.9 Added support for ``|`` and ``|=`` operators, as specified in :pep:`584`. From webhook-mailer at python.org Wed Jun 10 16:37:26 2020 From: webhook-mailer at python.org (Antoine Pitrou) Date: Wed, 10 Jun 2020 20:37:26 -0000 Subject: [Python-checkins] [3.7] bpo-40895: Update weakref documentation to remove old warnings (GH-20687) (GH-20793) Message-ID: https://github.com/python/cpython/commit/049039832da3d02592d680cebf71ab8a665a6564 commit: 049039832da3d02592d680cebf71ab8a665a6564 branch: 3.7 author: Antoine Pitrou committer: GitHub date: 2020-06-10T13:37:21-07:00 summary: [3.7] bpo-40895: Update weakref documentation to remove old warnings (GH-20687) (GH-20793) The doccumentation at https://docs.python.org/3.10/library/weakref.html cautions that the `WeakKeyDictionary` and `WeakValueDictionary` are susceptible to the problem of dictionary mutation during iteration. These notes present the user with a problem that has no easy solution. I dug into the implementation and found that fortunately, Antoine Pitrou already addressed this challenge (10 years ago!) by introducing an `_IterationGuard` context manager to the implementation, which delays mutation while an iteration is in progress. I asked for confirmation and @pitrou agreed that these notes could be removed: https://github.com/python/cpython/commit/c1baa601e2b558deb690edfdf334fceee3b03327GH-commitcomment-39514438. (cherry picked from commit 1642c0ef750f96664a98cadb09301d492098d2fb) Co-authored-by: Daniel Fortunov Automerge-Triggered-By: @pitrou files: M Doc/library/weakref.rst diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 93efcef1501b4..c92abe73785fe 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -159,13 +159,6 @@ Extension types can easily be made to support weak references; see application without adding attributes to those objects. 
This can be especially useful with objects that override attribute accesses. - .. note:: - - Caution: Because a :class:`WeakKeyDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakKeyDictionary` because actions - performed by the program during iteration may cause items in the - dictionary to vanish "by magic" (as a side effect of garbage collection). :class:`WeakKeyDictionary` objects have an additional method that exposes the internal references directly. The references are not guaranteed to @@ -185,13 +178,6 @@ than needed. Mapping class that references values weakly. Entries in the dictionary will be discarded when no strong reference to the value exists any more. - .. note:: - - Caution: Because a :class:`WeakValueDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakValueDictionary` because actions performed - by the program during iteration may cause items in the dictionary to vanish "by - magic" (as a side effect of garbage collection). :class:`WeakValueDictionary` objects have an additional method that has the same issues as the :meth:`keyrefs` method of :class:`WeakKeyDictionary` From webhook-mailer at python.org Wed Jun 10 16:38:47 2020 From: webhook-mailer at python.org (Antoine Pitrou) Date: Wed, 10 Jun 2020 20:38:47 -0000 Subject: [Python-checkins] [3.8] bpo-40895: Update weakref documentation to remove old warnings (GH-20687) (GH-20792) Message-ID: https://github.com/python/cpython/commit/972aba86ede0bf254e16a760639a1ff8df298578 commit: 972aba86ede0bf254e16a760639a1ff8df298578 branch: 3.8 author: Antoine Pitrou committer: GitHub date: 2020-06-10T13:38:42-07:00 summary: [3.8] bpo-40895: Update weakref documentation to remove old warnings (GH-20687) (GH-20792) The doccumentation at https://docs.python.org/3.10/library/weakref.html cautions that the `WeakKeyDictionary` and `WeakValueDictionary` are susceptible to the problem of dictionary mutation during iteration. These notes present the user with a problem that has no easy solution. I dug into the implementation and found that fortunately, Antoine Pitrou already addressed this challenge (10 years ago!) by introducing an `_IterationGuard` context manager to the implementation, which delays mutation while an iteration is in progress. I asked for confirmation and @pitrou agreed that these notes could be removed: https://github.com/python/cpython/commit/c1baa601e2b558deb690edfdf334fceee3b03327GH-commitcomment-39514438. (cherry picked from commit 1642c0ef750f96664a98cadb09301d492098d2fb) Co-authored-by: Daniel Fortunov Automerge-Triggered-By: @pitrou files: M Doc/library/weakref.rst diff --git a/Doc/library/weakref.rst b/Doc/library/weakref.rst index 2dbe5e33bd011..0d9f21d2e83a4 100644 --- a/Doc/library/weakref.rst +++ b/Doc/library/weakref.rst @@ -163,13 +163,6 @@ Extension types can easily be made to support weak references; see application without adding attributes to those objects. This can be especially useful with objects that override attribute accesses. - .. note:: - - Caution: Because a :class:`WeakKeyDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakKeyDictionary` because actions - performed by the program during iteration may cause items in the - dictionary to vanish "by magic" (as a side effect of garbage collection). 
:class:`WeakKeyDictionary` objects have an additional method that exposes the internal references directly. The references are not guaranteed to @@ -189,13 +182,6 @@ than needed. Mapping class that references values weakly. Entries in the dictionary will be discarded when no strong reference to the value exists any more. - .. note:: - - Caution: Because a :class:`WeakValueDictionary` is built on top of a Python - dictionary, it must not change size when iterating over it. This can be - difficult to ensure for a :class:`WeakValueDictionary` because actions performed - by the program during iteration may cause items in the dictionary to vanish "by - magic" (as a side effect of garbage collection). :class:`WeakValueDictionary` objects have an additional method that has the same issues as the :meth:`keyrefs` method of :class:`WeakKeyDictionary` From webhook-mailer at python.org Wed Jun 10 19:31:31 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Wed, 10 Jun 2020 23:31:31 -0000 Subject: [Python-checkins] Restrict co_code to be under INT_MAX in codeobject (GH-20628) Message-ID: https://github.com/python/cpython/commit/3b3b83c965447a8329b34cb4befe6e9908880ee5 commit: 3b3b83c965447a8329b34cb4befe6e9908880ee5 branch: master author: Ammar Askar committer: GitHub date: 2020-06-11T00:31:22+01:00 summary: Restrict co_code to be under INT_MAX in codeobject (GH-20628) files: M Objects/codeobject.c M Objects/frameobject.c diff --git a/Objects/codeobject.c b/Objects/codeobject.c index 737635943aced..cb4fb68124333 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -166,6 +166,14 @@ PyCode_NewWithPosOnlyArgs(int argcount, int posonlyargcount, int kwonlyargcount, return NULL; } + /* Make sure that code is indexable with an int, this is + a long running assumption in ceval.c and many parts of + the interpreter. */ + if (PyBytes_GET_SIZE(code) > INT_MAX) { + PyErr_SetString(PyExc_OverflowError, "co_code larger than INT_MAX"); + return NULL; + } + /* Check for any inner or outer closure references */ n_cellvars = PyTuple_GET_SIZE(cellvars); if (!n_cellvars && !PyTuple_GET_SIZE(freevars)) { diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 0dad42ee7bff3..6e1cbcfaf6f51 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -397,9 +397,9 @@ frame_setlineno(PyFrameObject *f, PyObject* p_new_lineno, void *Py_UNUSED(ignore return -1; } - int len = Py_SAFE_DOWNCAST( - PyBytes_GET_SIZE(f->f_code->co_code)/sizeof(_Py_CODEUNIT), - Py_ssize_t, int); + /* PyCode_NewWithPosOnlyArgs limits co_code to be under INT_MAX so this + * should never overflow. 
*/ + int len = (int)(PyBytes_GET_SIZE(f->f_code->co_code) / sizeof(_Py_CODEUNIT)); int *lines = marklines(f->f_code, len); if (lines == NULL) { return -1; From webhook-mailer at python.org Wed Jun 10 19:51:23 2020 From: webhook-mailer at python.org (Hai Shi) Date: Wed, 10 Jun 2020 23:51:23 -0000 Subject: [Python-checkins] bpo-40275: Add import_helper submodule in test.support (GH-20794) Message-ID: https://github.com/python/cpython/commit/7f888c7ef905842bf7739cc03bd20398329951b5 commit: 7f888c7ef905842bf7739cc03bd20398329951b5 branch: master author: Hai Shi committer: GitHub date: 2020-06-11T01:51:18+02:00 summary: bpo-40275: Add import_helper submodule in test.support (GH-20794) files: A Lib/test/support/import_helper.py M Doc/library/test.rst M Lib/test/support/__init__.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 11d748466cba2..a18197aed3f4a 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -398,25 +398,6 @@ The :mod:`test.support` module defines the following constants: The :mod:`test.support` module defines the following functions: -.. function:: forget(module_name) - - Remove the module named *module_name* from ``sys.modules`` and delete any - byte-compiled files of the module. - - -.. function:: unload(name) - - Delete *name* from ``sys.modules``. - - -.. function:: make_legacy_pyc(source) - - Move a :pep:`3147`/:pep:`488` pyc file to its legacy pyc location and return the file - system path to the legacy pyc file. The *source* value is the file system - path to the source file. It does not need to exist, however the PEP - 3147/488 pyc file must exist. - - .. function:: is_resource_enabled(resource) Return ``True`` if *resource* is enabled and available. The list of @@ -889,67 +870,6 @@ The :mod:`test.support` module defines the following functions: Open *url*. If open fails, raises :exc:`TestFailed`. -.. function:: import_module(name, deprecated=False, *, required_on()) - - This function imports and returns the named module. Unlike a normal - import, this function raises :exc:`unittest.SkipTest` if the module - cannot be imported. - - Module and package deprecation messages are suppressed during this import - if *deprecated* is ``True``. If a module is required on a platform but - optional for others, set *required_on* to an iterable of platform prefixes - which will be compared against :data:`sys.platform`. - - .. versionadded:: 3.1 - - -.. function:: import_fresh_module(name, fresh=(), blocked=(), deprecated=False) - - This function imports and returns a fresh copy of the named Python module - by removing the named module from ``sys.modules`` before doing the import. - Note that unlike :func:`reload`, the original module is not affected by - this operation. - - *fresh* is an iterable of additional module names that are also removed - from the ``sys.modules`` cache before doing the import. - - *blocked* is an iterable of module names that are replaced with ``None`` - in the module cache during the import to ensure that attempts to import - them raise :exc:`ImportError`. - - The named module and any modules named in the *fresh* and *blocked* - parameters are saved before starting the import and then reinserted into - ``sys.modules`` when the fresh import is complete. - - Module and package deprecation messages are suppressed during this import - if *deprecated* is ``True``. - - This function will raise :exc:`ImportError` if the named module cannot be - imported. 
- - Example use:: - - # Get copies of the warnings module for testing without affecting the - # version being used by the rest of the test suite. One copy uses the - # C implementation, the other is forced to use the pure Python fallback - # implementation - py_warnings = import_fresh_module('warnings', blocked=['_warnings']) - c_warnings = import_fresh_module('warnings', fresh=['_warnings']) - - .. versionadded:: 3.1 - - -.. function:: modules_setup() - - Return a copy of :data:`sys.modules`. - - -.. function:: modules_cleanup(oldmodules) - - Remove modules except for *oldmodules* and ``encodings`` in order to - preserve internal cache. - - .. function:: reap_children() Use this at the end of ``test_main`` whenever sub-processes are started. @@ -1113,29 +1033,6 @@ The :mod:`test.support` module defines the following classes: On both platforms, the old value is restored by :meth:`__exit__`. -.. class:: CleanImport(*module_names) - - A context manager to force import to return a new module reference. This - is useful for testing module-level behaviors, such as the emission of a - DeprecationWarning on import. Example usage:: - - with CleanImport('foo'): - importlib.import_module('foo') # New reference. - - -.. class:: DirsOnSysPath(*paths) - - A context manager to temporarily add directories to sys.path. - - This makes a copy of :data:`sys.path`, appends any directories given - as positional arguments, then reverts :data:`sys.path` to the copied - settings when the context ends. - - Note that *all* :data:`sys.path` modifications in the body of the - context manager, including replacement of the object, - will be reverted at the end of the block. - - .. class:: SaveSignals() Class to save and restore signal handlers registered by the Python signal @@ -1646,3 +1543,119 @@ The :mod:`test.support.os_helper` module provides support for os tests. Call :func:`os.unlink` on *filename*. On Windows platforms, this is wrapped with a wait loop that checks for the existence fo the file. + + +:mod:`test.support.import_helper` --- Utilities for import tests +================================================================ + +.. module:: test.support.import_helper + :synopsis: Support for import tests. + +The :mod:`test.support.import_helper` module provides support for import tests. + +.. versionadded:: 3.10 + + +.. function:: forget(module_name) + + Remove the module named *module_name* from ``sys.modules`` and delete any + byte-compiled files of the module. + + +.. function:: import_fresh_module(name, fresh=(), blocked=(), deprecated=False) + + This function imports and returns a fresh copy of the named Python module + by removing the named module from ``sys.modules`` before doing the import. + Note that unlike :func:`reload`, the original module is not affected by + this operation. + + *fresh* is an iterable of additional module names that are also removed + from the ``sys.modules`` cache before doing the import. + + *blocked* is an iterable of module names that are replaced with ``None`` + in the module cache during the import to ensure that attempts to import + them raise :exc:`ImportError`. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + ``sys.modules`` when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is ``True``. + + This function will raise :exc:`ImportError` if the named module cannot be + imported. 
+ + Example use:: + + # Get copies of the warnings module for testing without affecting the + # version being used by the rest of the test suite. One copy uses the + # C implementation, the other is forced to use the pure Python fallback + # implementation + py_warnings = import_fresh_module('warnings', blocked=['_warnings']) + c_warnings = import_fresh_module('warnings', fresh=['_warnings']) + + .. versionadded:: 3.1 + + +.. function:: import_module(name, deprecated=False, *, required_on()) + + This function imports and returns the named module. Unlike a normal + import, this function raises :exc:`unittest.SkipTest` if the module + cannot be imported. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is ``True``. If a module is required on a platform but + optional for others, set *required_on* to an iterable of platform prefixes + which will be compared against :data:`sys.platform`. + + .. versionadded:: 3.1 + + +.. function:: modules_setup() + + Return a copy of :data:`sys.modules`. + + +.. function:: modules_cleanup(oldmodules) + + Remove modules except for *oldmodules* and ``encodings`` in order to + preserve internal cache. + + +.. function:: unload(name) + + Delete *name* from ``sys.modules``. + + +.. function:: make_legacy_pyc(source) + + Move a :pep:`3147`/:pep:`488` pyc file to its legacy pyc location and return the file + system path to the legacy pyc file. The *source* value is the file system + path to the source file. It does not need to exist, however the PEP + 3147/488 pyc file must exist. + + +.. class:: CleanImport(*module_names) + + A context manager to force import to return a new module reference. This + is useful for testing module-level behaviors, such as the emission of a + DeprecationWarning on import. Example usage:: + + with CleanImport('foo'): + importlib.import_module('foo') # New reference. + + +.. class:: DirsOnSysPath(*paths) + + A context manager to temporarily add directories to sys.path. + + This makes a copy of :data:`sys.path`, appends any directories given + as positional arguments, then reverts :data:`sys.path` to the copied + settings when the context ends. + + Note that *all* :data:`sys.path` modifications in the body of the + context manager, including replacement of the object, + will be reverted at the end of the block. + + diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 83b21733de0f1..3778eed62169c 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -8,8 +8,6 @@ import fnmatch import functools import glob -import importlib -import importlib.util import os import re import stat @@ -21,6 +19,11 @@ import unittest import warnings +from .import_helper import ( + CleanImport, DirsOnSysPath, _ignore_deprecated_imports, + _save_and_block_module, _save_and_remove_module, + forget, import_fresh_module, import_module, make_legacy_pyc, + modules_cleanup, modules_setup, unload) from .os_helper import ( FS_NONASCII, SAVEDCWD, TESTFN, TESTFN_NONASCII, TESTFN_UNENCODABLE, TESTFN_UNDECODABLE, @@ -39,10 +42,6 @@ "PIPE_MAX_SIZE", "verbose", "max_memuse", "use_resources", "failfast", # exceptions "Error", "TestFailed", "TestDidNotRun", "ResourceDenied", - # imports - "import_module", "import_fresh_module", "CleanImport", - # modules - "unload", "forget", # io "record_original_stdout", "get_original_stdout", "captured_stdout", "captured_stdin", "captured_stderr", @@ -132,22 +131,6 @@ class ResourceDenied(unittest.SkipTest): and unexpected skips. 
""" - at contextlib.contextmanager -def _ignore_deprecated_imports(ignore=True): - """Context manager to suppress package and module deprecation - warnings when importing them. - - If ignore is False, this context manager has no effect. - """ - if ignore: - with warnings.catch_warnings(): - warnings.filterwarnings("ignore", ".+ (module|package)", - DeprecationWarning) - yield - else: - yield - - def ignore_warnings(*, category): """Decorator to suppress deprecation warnings. @@ -164,52 +147,6 @@ def wrapper(self, *args, **kwargs): return decorator -def import_module(name, deprecated=False, *, required_on=()): - """Import and return the module to be tested, raising SkipTest if - it is not available. - - If deprecated is True, any module or package deprecation messages - will be suppressed. If a module is required on a platform but optional for - others, set required_on to an iterable of platform prefixes which will be - compared against sys.platform. - """ - with _ignore_deprecated_imports(deprecated): - try: - return importlib.import_module(name) - except ImportError as msg: - if sys.platform.startswith(tuple(required_on)): - raise - raise unittest.SkipTest(str(msg)) - - -def _save_and_remove_module(name, orig_modules): - """Helper function to save and remove a module from sys.modules - - Raise ImportError if the module can't be imported. - """ - # try to import the module and raise an error if it can't be imported - if name not in sys.modules: - __import__(name) - del sys.modules[name] - for modname in list(sys.modules): - if modname == name or modname.startswith(name + '.'): - orig_modules[modname] = sys.modules[modname] - del sys.modules[modname] - -def _save_and_block_module(name, orig_modules): - """Helper function to save and block a module in sys.modules - - Return True if the module was in sys.modules, False otherwise. - """ - saved = True - try: - orig_modules[name] = sys.modules[name] - except KeyError: - saved = False - sys.modules[name] = None - return saved - - def anticipate_failure(condition): """Decorator to mark a test that is known to be broken in some cases @@ -240,56 +177,6 @@ def load_tests(*args): return standard_tests -def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): - """Import and return a module, deliberately bypassing sys.modules. - - This function imports and returns a fresh copy of the named Python module - by removing the named module from sys.modules before doing the import. - Note that unlike reload, the original module is not affected by - this operation. - - *fresh* is an iterable of additional module names that are also removed - from the sys.modules cache before doing the import. - - *blocked* is an iterable of module names that are replaced with None - in the module cache during the import to ensure that attempts to import - them raise ImportError. - - The named module and any modules named in the *fresh* and *blocked* - parameters are saved before starting the import and then reinserted into - sys.modules when the fresh import is complete. - - Module and package deprecation messages are suppressed during this import - if *deprecated* is True. - - This function will raise ImportError if the named module cannot be - imported. 
- """ - # NOTE: test_heapq, test_json and test_warnings include extra sanity checks - # to make sure that this utility function is working as expected - with _ignore_deprecated_imports(deprecated): - # Keep track of modules saved for later restoration as well - # as those which just need a blocking entry removed - orig_modules = {} - names_to_remove = [] - _save_and_remove_module(name, orig_modules) - try: - for fresh_name in fresh: - _save_and_remove_module(fresh_name, orig_modules) - for blocked_name in blocked: - if not _save_and_block_module(blocked_name, orig_modules): - names_to_remove.append(blocked_name) - fresh_module = importlib.import_module(name) - except ImportError: - fresh_module = None - finally: - for orig_name, module in orig_modules.items(): - sys.modules[orig_name] = module - for name_to_remove in names_to_remove: - del sys.modules[name_to_remove] - return fresh_module - - def get_attribute(obj, name): """Get an attribute, raising SkipTest if AttributeError is raised.""" try: @@ -318,12 +205,6 @@ def record_original_stdout(stdout): def get_original_stdout(): return _original_stdout or sys.stdout -def unload(name): - try: - del sys.modules[name] - except KeyError: - pass - def _force_run(path, func, *args): try: @@ -336,34 +217,6 @@ def _force_run(path, func, *args): return func(*args) -def make_legacy_pyc(source): - """Move a PEP 3147/488 pyc file to its legacy pyc location. - - :param source: The file system path to the source file. The source file - does not need to exist, however the PEP 3147/488 pyc file must exist. - :return: The file system path to the legacy pyc file. - """ - pyc_file = importlib.util.cache_from_source(source) - up_one = os.path.dirname(os.path.abspath(source)) - legacy_pyc = os.path.join(up_one, source + 'c') - os.rename(pyc_file, legacy_pyc) - return legacy_pyc - -def forget(modname): - """'Forget' a module was ever imported. - - This removes the module from sys.modules and deletes any PEP 3147/488 or - legacy .pyc files. - """ - unload(modname) - for dirname in sys.path: - source = os.path.join(dirname, modname + '.py') - # It doesn't matter if they exist or not, unlink all possible - # combinations of PEP 3147/488 and legacy pyc files. - unlink(source + 'c') - for opt in ('', 1, 2): - unlink(importlib.util.cache_from_source(source, optimization=opt)) - # Check whether a gui is actually available def _is_gui_available(): if hasattr(_is_gui_available, 'result'): @@ -870,63 +723,6 @@ def check_no_resource_warning(testcase): yield -class CleanImport(object): - """Context manager to force import to return a new module reference. - - This is useful for testing module-level behaviours, such as - the emission of a DeprecationWarning on import. - - Use like this: - - with CleanImport("foo"): - importlib.import_module("foo") # new reference - """ - - def __init__(self, *module_names): - self.original_modules = sys.modules.copy() - for module_name in module_names: - if module_name in sys.modules: - module = sys.modules[module_name] - # It is possible that module_name is just an alias for - # another module (e.g. stub for modules renamed in 3.x). - # In that case, we also need delete the real module to clear - # the import cache. - if module.__name__ != module_name: - del sys.modules[module.__name__] - del sys.modules[module_name] - - def __enter__(self): - return self - - def __exit__(self, *ignore_exc): - sys.modules.update(self.original_modules) - - -class DirsOnSysPath(object): - """Context manager to temporarily add directories to sys.path. 
-
-    This makes a copy of sys.path, appends any directories given
-    as positional arguments, then reverts sys.path to the copied
-    settings when the context ends.
-
-    Note that *all* sys.path modifications in the body of the
-    context manager, including replacement of the object,
-    will be reverted at the end of the block.
-    """
-
-    def __init__(self, *paths):
-        self.original_value = sys.path[:]
-        self.original_object = sys.path
-        sys.path.extend(paths)
-
-    def __enter__(self):
-        return self
-
-    def __exit__(self, *ignore_exc):
-        sys.path = self.original_object
-        sys.path[:] = self.original_value
-
-
 class TransientResource(object):

     """Raise ResourceDenied if an exception is raised while the context manager
@@ -1553,24 +1349,6 @@ def print_warning(msg):
     for line in msg.splitlines():
         print(f"Warning -- {line}", file=sys.__stderr__, flush=True)

-def modules_setup():
-    return sys.modules.copy(),
-
-def modules_cleanup(oldmodules):
-    # Encoders/decoders are registered permanently within the internal
-    # codec cache. If we destroy the corresponding modules their
-    # globals will be set to None which will trip up the cached functions.
-    encodings = [(k, v) for k, v in sys.modules.items()
-                 if k.startswith('encodings.')]
-    sys.modules.clear()
-    sys.modules.update(encodings)
-    # XXX: This kind of problem can affect more than just encodings. In particular
-    # extension modules (such as _ssl) don't cope with reloading properly.
-    # Really, test modules should be cleaning out the test specific modules they
-    # know they added (ala test_runpy) rather than relying on this function (as
-    # test_importhooks and test_pkg do currently).
-    # Implicitly imported *real* modules should be left alone (see issue 10556).
-    sys.modules.update(oldmodules)

 # Flag used by saved_test_environment of test.libregrtest.save_env,
 # to check if a test modified the environment. The flag should be set to False
diff --git a/Lib/test/support/import_helper.py b/Lib/test/support/import_helper.py
new file mode 100644
index 0000000000000..5d1e9406879cc
--- /dev/null
+++ b/Lib/test/support/import_helper.py
@@ -0,0 +1,238 @@
+import contextlib
+import importlib
+import importlib.util
+import os
+import sys
+import unittest
+import warnings
+
+from .os_helper import unlink
+
+
+@contextlib.contextmanager
+def _ignore_deprecated_imports(ignore=True):
+    """Context manager to suppress package and module deprecation
+    warnings when importing them.
+
+    If ignore is False, this context manager has no effect.
+    """
+    if ignore:
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", ".+ (module|package)",
+                                    DeprecationWarning)
+            yield
+    else:
+        yield
+
+
+def unload(name):
+    try:
+        del sys.modules[name]
+    except KeyError:
+        pass
+
+
+def forget(modname):
+    """'Forget' a module was ever imported.
+
+    This removes the module from sys.modules and deletes any PEP 3147/488 or
+    legacy .pyc files.
+    """
+    unload(modname)
+    for dirname in sys.path:
+        source = os.path.join(dirname, modname + '.py')
+        # It doesn't matter if they exist or not, unlink all possible
+        # combinations of PEP 3147/488 and legacy pyc files.
+        unlink(source + 'c')
+        for opt in ('', 1, 2):
+            unlink(importlib.util.cache_from_source(source, optimization=opt))
+
+
+def make_legacy_pyc(source):
+    """Move a PEP 3147/488 pyc file to its legacy pyc location.
+
+    :param source: The file system path to the source file. The source file
+        does not need to exist, however the PEP 3147/488 pyc file must exist.
+    :return: The file system path to the legacy pyc file.
+ """ + pyc_file = importlib.util.cache_from_source(source) + up_one = os.path.dirname(os.path.abspath(source)) + legacy_pyc = os.path.join(up_one, source + 'c') + os.rename(pyc_file, legacy_pyc) + return legacy_pyc + + +def import_module(name, deprecated=False, *, required_on=()): + """Import and return the module to be tested, raising SkipTest if + it is not available. + + If deprecated is True, any module or package deprecation messages + will be suppressed. If a module is required on a platform but optional for + others, set required_on to an iterable of platform prefixes which will be + compared against sys.platform. + """ + with _ignore_deprecated_imports(deprecated): + try: + return importlib.import_module(name) + except ImportError as msg: + if sys.platform.startswith(tuple(required_on)): + raise + raise unittest.SkipTest(str(msg)) + + +def _save_and_remove_module(name, orig_modules): + """Helper function to save and remove a module from sys.modules + + Raise ImportError if the module can't be imported. + """ + # try to import the module and raise an error if it can't be imported + if name not in sys.modules: + __import__(name) + del sys.modules[name] + for modname in list(sys.modules): + if modname == name or modname.startswith(name + '.'): + orig_modules[modname] = sys.modules[modname] + del sys.modules[modname] + + +def _save_and_block_module(name, orig_modules): + """Helper function to save and block a module in sys.modules + + Return True if the module was in sys.modules, False otherwise. + """ + saved = True + try: + orig_modules[name] = sys.modules[name] + except KeyError: + saved = False + sys.modules[name] = None + return saved + + +def import_fresh_module(name, fresh=(), blocked=(), deprecated=False): + """Import and return a module, deliberately bypassing sys.modules. + + This function imports and returns a fresh copy of the named Python module + by removing the named module from sys.modules before doing the import. + Note that unlike reload, the original module is not affected by + this operation. + + *fresh* is an iterable of additional module names that are also removed + from the sys.modules cache before doing the import. + + *blocked* is an iterable of module names that are replaced with None + in the module cache during the import to ensure that attempts to import + them raise ImportError. + + The named module and any modules named in the *fresh* and *blocked* + parameters are saved before starting the import and then reinserted into + sys.modules when the fresh import is complete. + + Module and package deprecation messages are suppressed during this import + if *deprecated* is True. + + This function will raise ImportError if the named module cannot be + imported. 
+ """ + # NOTE: test_heapq, test_json and test_warnings include extra sanity checks + # to make sure that this utility function is working as expected + with _ignore_deprecated_imports(deprecated): + # Keep track of modules saved for later restoration as well + # as those which just need a blocking entry removed + orig_modules = {} + names_to_remove = [] + _save_and_remove_module(name, orig_modules) + try: + for fresh_name in fresh: + _save_and_remove_module(fresh_name, orig_modules) + for blocked_name in blocked: + if not _save_and_block_module(blocked_name, orig_modules): + names_to_remove.append(blocked_name) + fresh_module = importlib.import_module(name) + except ImportError: + fresh_module = None + finally: + for orig_name, module in orig_modules.items(): + sys.modules[orig_name] = module + for name_to_remove in names_to_remove: + del sys.modules[name_to_remove] + return fresh_module + + +class CleanImport(object): + """Context manager to force import to return a new module reference. + + This is useful for testing module-level behaviours, such as + the emission of a DeprecationWarning on import. + + Use like this: + + with CleanImport("foo"): + importlib.import_module("foo") # new reference + """ + + def __init__(self, *module_names): + self.original_modules = sys.modules.copy() + for module_name in module_names: + if module_name in sys.modules: + module = sys.modules[module_name] + # It is possible that module_name is just an alias for + # another module (e.g. stub for modules renamed in 3.x). + # In that case, we also need delete the real module to clear + # the import cache. + if module.__name__ != module_name: + del sys.modules[module.__name__] + del sys.modules[module_name] + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.modules.update(self.original_modules) + + +class DirsOnSysPath(object): + """Context manager to temporarily add directories to sys.path. + + This makes a copy of sys.path, appends any directories given + as positional arguments, then reverts sys.path to the copied + settings when the context ends. + + Note that *all* sys.path modifications in the body of the + context manager, including replacement of the object, + will be reverted at the end of the block. + """ + + def __init__(self, *paths): + self.original_value = sys.path[:] + self.original_object = sys.path + sys.path.extend(paths) + + def __enter__(self): + return self + + def __exit__(self, *ignore_exc): + sys.path = self.original_object + sys.path[:] = self.original_value + + +def modules_setup(): + return sys.modules.copy(), + + +def modules_cleanup(oldmodules): + # Encoders/decoders are registered permanently within the internal + # codec cache. If we destroy the corresponding modules their + # globals will be set to None which will trip up the cached functions. + encodings = [(k, v) for k, v in sys.modules.items() + if k.startswith('encodings.')] + sys.modules.clear() + sys.modules.update(encodings) + # XXX: This kind of problem can affect more than just encodings. + # In particular extension modules (such as _ssl) don't cope + # with reloading properly. Really, test modules should be cleaning + # out the test specific modules they know they added (ala test_runpy) + # rather than relying on this function (as test_importhooks and test_pkg + # do currently). Implicitly imported *real* modules should be left alone + # (see issue 10556). 
+ sys.modules.update(oldmodules) From webhook-mailer at python.org Wed Jun 10 19:56:13 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Wed, 10 Jun 2020 23:56:13 -0000 Subject: [Python-checkins] bpo-40847: Consider a line with only a LINECONT a blank line (GH-20769) Message-ID: https://github.com/python/cpython/commit/896f4cf63f9ab93e30572d879a5719d5aa2499fb commit: 896f4cf63f9ab93e30572d879a5719d5aa2499fb branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-11T00:56:08+01:00 summary: bpo-40847: Consider a line with only a LINECONT a blank line (GH-20769) A line with only a line continuation character should be considered a blank line at tokenizer level so that only a single NEWLINE token gets emitted. The old parser was working around the issue, but the new parser threw a `SyntaxError` for valid input. For example, an empty line following a line continuation character was interpreted as a `SyntaxError`. Co-authored-by: Pablo Galindo files: A Misc/NEWS.d/next/Core and Builtins/2020-06-09-23-52-32.bpo-40847.4XAACw.rst M Lib/test/test_peg_parser.py M Lib/test/test_syntax.py M Parser/tokenizer.c diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py index 6ccb2573176bb..fae85e323da04 100644 --- a/Lib/test/test_peg_parser.py +++ b/Lib/test/test_peg_parser.py @@ -153,6 +153,13 @@ def f(): ('dict_comp', '{x:1 for x in a}'), ('dict_comp_if', '{x:1+2 for x in a if b}'), ('dict_empty', '{}'), + ('empty_line_after_linecont', + r''' + pass + \ + + pass + '''), ('for', ''' for i in a: diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index f41426a4e9d2d..0c207ec8fc07c 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -858,6 +858,20 @@ def test_kwargs_last3(self): "iterable argument unpacking follows " "keyword argument unpacking") + def test_empty_line_after_linecont(self): + # See issue-40847 + s = r"""\ +pass + \ + +pass +""" + try: + compile(s, '', 'exec') + except SyntaxError: + self.fail("Empty line after a line continuation character is valid.") + + def test_main(): support.run_unittest(SyntaxTestCase) from test import test_syntax diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-09-23-52-32.bpo-40847.4XAACw.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-09-23-52-32.bpo-40847.4XAACw.rst new file mode 100644 index 0000000000000..0b489f2483215 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-09-23-52-32.bpo-40847.4XAACw.rst @@ -0,0 +1,4 @@ +Fix a bug where a line with only a line continuation character is not considered a blank line at tokenizer level. +In such cases, more than a single `NEWLINE` token was emitted. The old parser was working around the issue, +but the new parser threw a :exc:`SyntaxError` for valid input due to this. For example, an empty line following +a line continuation character was interpreted as a :exc:`SyntaxError`. 
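(Editorial aside, not part of the commit or its diff: the behaviour described above can be checked directly with compile(). The snippet below mirrors the regression exercised by the new tests; before this fix the new PEG parser rejected such input, while with the fix, and with the old parser, it compiles cleanly.)

    # Illustration only: the second line holds nothing but whitespace and a
    # line continuation character and is followed by an empty line.
    source = (
        "pass\n"
        "    \\\n"   # line with only whitespace and a line continuation
        "\n"         # ...followed by an empty line
        "pass\n"
    )
    compile(source, "<example>", "exec")   # must not raise SyntaxError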
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index cebfadc8e89f3..d461e4e24e721 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1203,8 +1203,9 @@ tok_get(struct tok_state *tok, const char **p_start, const char **p_end) } } tok_backup(tok, c); - if (c == '#' || c == '\n') { + if (c == '#' || c == '\n' || c == '\\') { /* Lines with only whitespace and/or comments + and/or a line continuation character shouldn't affect the indentation and are not passed to the parser as NEWLINE tokens, except *totally* empty lines in interactive From webhook-mailer at python.org Thu Jun 11 02:18:26 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Thu, 11 Jun 2020 06:18:26 -0000 Subject: [Python-checkins] Collections module reformatting and minor code refactoring (GH-20772) Message-ID: https://github.com/python/cpython/commit/31d17798d6567036d3ac2771555a919b3628962f commit: 31d17798d6567036d3ac2771555a919b3628962f branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-10T23:17:58-07:00 summary: Collections module reformatting and minor code refactoring (GH-20772) files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 6a06cc6a64f16..42d0ec777c3f7 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -14,17 +14,30 @@ ''' -__all__ = ['deque', 'defaultdict', 'namedtuple', 'UserDict', 'UserList', - 'UserString', 'Counter', 'OrderedDict', 'ChainMap'] +__all__ = [ + 'ChainMap', + 'Counter', + 'OrderedDict', + 'UserDict', + 'UserList', + 'UserString', + 'defaultdict', + 'deque', + 'namedtuple', +] import _collections_abc -from operator import itemgetter as _itemgetter, eq as _eq -from keyword import iskeyword as _iskeyword -import sys as _sys import heapq as _heapq -from _weakref import proxy as _proxy -from itertools import repeat as _repeat, chain as _chain, starmap as _starmap +import sys as _sys + +from itertools import chain as _chain +from itertools import repeat as _repeat +from itertools import starmap as _starmap +from keyword import iskeyword as _iskeyword +from operator import eq as _eq +from operator import itemgetter as _itemgetter from reprlib import recursive_repr as _recursive_repr +from _weakref import proxy as _proxy try: from _collections import deque @@ -54,6 +67,7 @@ def __getattr__(name): return obj raise AttributeError(f'module {__name__!r} has no attribute {name!r}') + ################################################################################ ### OrderedDict ################################################################################ @@ -408,10 +422,13 @@ def namedtuple(typename, field_names, *, rename=False, defaults=None, module=Non # Create all the named tuple methods to be added to the class namespace - s = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' - namespace = {'_tuple_new': tuple_new, '__builtins__': None, - '__name__': f'namedtuple_{typename}'} - __new__ = eval(s, namespace) + namespace = { + '_tuple_new': tuple_new, + '__builtins__': None, + '__name__': f'namedtuple_{typename}', + } + code = f'lambda _cls, {arg_list}: _tuple_new(_cls, ({arg_list}))' + __new__ = eval(code, namespace) __new__.__name__ = '__new__' __new__.__doc__ = f'Create new instance of {typename}({arg_list})' if defaults is not None: @@ -449,8 +466,14 @@ def __getnewargs__(self): return _tuple(self) # Modify function metadata to help with introspection and debugging - for method in (__new__, _make.__func__, _replace, - __repr__, _asdict, __getnewargs__): 
+ for method in ( + __new__, + _make.__func__, + _replace, + __repr__, + _asdict, + __getnewargs__, + ): method.__qualname__ = f'{typename}.{method.__name__}' # Build-up the class namespace dictionary @@ -566,7 +589,7 @@ def __init__(self, iterable=None, /, **kwds): >>> c = Counter(a=4, b=2) # a new counter from keyword args ''' - super(Counter, self).__init__() + super().__init__() self.update(iterable, **kwds) def __missing__(self, key): @@ -650,7 +673,8 @@ def update(self, iterable=None, /, **kwds): for elem, count in iterable.items(): self[elem] = count + self_get(elem, 0) else: - super(Counter, self).update(iterable) # fast path when counter is empty + # fast path when counter is empty + super().update(iterable) else: _count_elements(self, iterable) if kwds: @@ -733,13 +757,14 @@ def __gt__(self, other): def __repr__(self): if not self: - return '%s()' % self.__class__.__name__ + return f'{self.__class__.__name__}()' try: - items = ', '.join(map('%r: %r'.__mod__, self.most_common())) - return '%s({%s})' % (self.__class__.__name__, items) + # dict() preserves the ordering returned by most_common() + d = dict(self.most_common()) except TypeError: # handle case where values are not orderable - return '{0}({1!r})'.format(self.__class__.__name__, dict(self)) + d = dict(self) + return f'{self.__class__.__name__}({d!r})' # Multiset-style mathematical operations discussed in: # Knuth TAOCP Volume II section 4.6.3 exercise 19 @@ -1018,7 +1043,7 @@ def __delitem__(self, key): try: del self.maps[0][key] except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError(f'Key not found in the first mapping: {key!r}') def popitem(self): 'Remove and return an item pair from maps[0]. Raise KeyError is maps[0] is empty.' @@ -1032,30 +1057,30 @@ def pop(self, key, *args): try: return self.maps[0].pop(key, *args) except KeyError: - raise KeyError('Key not found in the first mapping: {!r}'.format(key)) + raise KeyError(f'Key not found in the first mapping: {key!r}') def clear(self): 'Clear maps[0], leaving maps[1:] intact.' 
self.maps[0].clear() def __ior__(self, other): - self.maps[0] |= other + self.maps[0].update(other) return self def __or__(self, other): - if isinstance(other, _collections_abc.Mapping): - m = self.maps[0].copy() - m.update(other) - return self.__class__(m, *self.maps[1:]) - return NotImplemented + if not isinstance(other, _collections_abc.Mapping): + return NotImplemented + m = self.copy() + m.maps[0].update(other) + return m def __ror__(self, other): - if isinstance(other, _collections_abc.Mapping): - m = dict(other) - for child in reversed(self.maps): - m.update(child) - return self.__class__(m) - return NotImplemented + if not isinstance(other, _collections_abc.Mapping): + return NotImplemented + m = dict(other) + for child in reversed(self.maps): + m.update(child) + return self.__class__(m) ################################################################################ @@ -1072,15 +1097,22 @@ def __init__(self, dict=None, /, **kwargs): if kwargs: self.update(kwargs) - def __len__(self): return len(self.data) + def __len__(self): + return len(self.data) + def __getitem__(self, key): if key in self.data: return self.data[key] if hasattr(self.__class__, "__missing__"): return self.__class__.__missing__(self, key) raise KeyError(key) - def __setitem__(self, key, item): self.data[key] = item - def __delitem__(self, key): del self.data[key] + + def __setitem__(self, key, item): + self.data[key] = item + + def __delitem__(self, key): + del self.data[key] + def __iter__(self): return iter(self.data) @@ -1089,7 +1121,8 @@ def __contains__(self, key): return key in self.data # Now, add the methods in dicts but not in MutableMapping - def __repr__(self): return repr(self.data) + def __repr__(self): + return repr(self.data) def __or__(self, other): if isinstance(other, UserDict): @@ -1097,12 +1130,14 @@ def __or__(self, other): if isinstance(other, dict): return self.__class__(self.data | other) return NotImplemented + def __ror__(self, other): if isinstance(other, UserDict): return self.__class__(other.data | self.data) if isinstance(other, dict): return self.__class__(other | self.data) return NotImplemented + def __ior__(self, other): if isinstance(other, UserDict): self.data |= other.data @@ -1138,13 +1173,13 @@ def fromkeys(cls, iterable, value=None): return d - ################################################################################ ### UserList ################################################################################ class UserList(_collections_abc.MutableSequence): """A more or less complete user-defined wrapper around list objects.""" + def __init__(self, initlist=None): self.data = [] if initlist is not None: @@ -1155,35 +1190,60 @@ def __init__(self, initlist=None): self.data[:] = initlist.data[:] else: self.data = list(initlist) - def __repr__(self): return repr(self.data) - def __lt__(self, other): return self.data < self.__cast(other) - def __le__(self, other): return self.data <= self.__cast(other) - def __eq__(self, other): return self.data == self.__cast(other) - def __gt__(self, other): return self.data > self.__cast(other) - def __ge__(self, other): return self.data >= self.__cast(other) + + def __repr__(self): + return repr(self.data) + + def __lt__(self, other): + return self.data < self.__cast(other) + + def __le__(self, other): + return self.data <= self.__cast(other) + + def __eq__(self, other): + return self.data == self.__cast(other) + + def __gt__(self, other): + return self.data > self.__cast(other) + + def __ge__(self, other): + return self.data >= 
self.__cast(other) + def __cast(self, other): return other.data if isinstance(other, UserList) else other - def __contains__(self, item): return item in self.data - def __len__(self): return len(self.data) + + def __contains__(self, item): + return item in self.data + + def __len__(self): + return len(self.data) + def __getitem__(self, i): if isinstance(i, slice): return self.__class__(self.data[i]) else: return self.data[i] - def __setitem__(self, i, item): self.data[i] = item - def __delitem__(self, i): del self.data[i] + + def __setitem__(self, i, item): + self.data[i] = item + + def __delitem__(self, i): + del self.data[i] + def __add__(self, other): if isinstance(other, UserList): return self.__class__(self.data + other.data) elif isinstance(other, type(self.data)): return self.__class__(self.data + other) return self.__class__(self.data + list(other)) + def __radd__(self, other): if isinstance(other, UserList): return self.__class__(other.data + self.data) elif isinstance(other, type(self.data)): return self.__class__(other + self.data) return self.__class__(list(other) + self.data) + def __iadd__(self, other): if isinstance(other, UserList): self.data += other.data @@ -1192,28 +1252,53 @@ def __iadd__(self, other): else: self.data += list(other) return self + def __mul__(self, n): - return self.__class__(self.data*n) + return self.__class__(self.data * n) + __rmul__ = __mul__ + def __imul__(self, n): self.data *= n return self + def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"][:] return inst - def append(self, item): self.data.append(item) - def insert(self, i, item): self.data.insert(i, item) - def pop(self, i=-1): return self.data.pop(i) - def remove(self, item): self.data.remove(item) - def clear(self): self.data.clear() - def copy(self): return self.__class__(self) - def count(self, item): return self.data.count(item) - def index(self, item, *args): return self.data.index(item, *args) - def reverse(self): self.data.reverse() - def sort(self, /, *args, **kwds): self.data.sort(*args, **kwds) + + def append(self, item): + self.data.append(item) + + def insert(self, i, item): + self.data.insert(i, item) + + def pop(self, i=-1): + return self.data.pop(i) + + def remove(self, item): + self.data.remove(item) + + def clear(self): + self.data.clear() + + def copy(self): + return self.__class__(self) + + def count(self, item): + return self.data.count(item) + + def index(self, item, *args): + return self.data.index(item, *args) + + def reverse(self): + self.data.reverse() + + def sort(self, /, *args, **kwds): + self.data.sort(*args, **kwds) + def extend(self, other): if isinstance(other, UserList): self.data.extend(other.data) @@ -1221,12 +1306,12 @@ def extend(self, other): self.data.extend(other) - ################################################################################ ### UserString ################################################################################ class UserString(_collections_abc.Sequence): + def __init__(self, seq): if isinstance(seq, str): self.data = seq @@ -1234,12 +1319,25 @@ def __init__(self, seq): self.data = seq.data[:] else: self.data = str(seq) - def __str__(self): return str(self.data) - def __repr__(self): return repr(self.data) - def __int__(self): return int(self.data) - def __float__(self): return float(self.data) - def __complex__(self): return complex(self.data) - def __hash__(self): return 
hash(self.data) + + def __str__(self): + return str(self.data) + + def __repr__(self): + return repr(self.data) + + def __int__(self): + return int(self.data) + + def __float__(self): + return float(self.data) + + def __complex__(self): + return complex(self.data) + + def __hash__(self): + return hash(self.data) + def __getnewargs__(self): return (self.data[:],) @@ -1247,18 +1345,22 @@ def __eq__(self, string): if isinstance(string, UserString): return self.data == string.data return self.data == string + def __lt__(self, string): if isinstance(string, UserString): return self.data < string.data return self.data < string + def __le__(self, string): if isinstance(string, UserString): return self.data <= string.data return self.data <= string + def __gt__(self, string): if isinstance(string, UserString): return self.data > string.data return self.data > string + def __ge__(self, string): if isinstance(string, UserString): return self.data >= string.data @@ -1269,110 +1371,188 @@ def __contains__(self, char): char = char.data return char in self.data - def __len__(self): return len(self.data) - def __getitem__(self, index): return self.__class__(self.data[index]) + def __len__(self): + return len(self.data) + + def __getitem__(self, index): + return self.__class__(self.data[index]) + def __add__(self, other): if isinstance(other, UserString): return self.__class__(self.data + other.data) elif isinstance(other, str): return self.__class__(self.data + other) return self.__class__(self.data + str(other)) + def __radd__(self, other): if isinstance(other, str): return self.__class__(other + self.data) return self.__class__(str(other) + self.data) + def __mul__(self, n): - return self.__class__(self.data*n) + return self.__class__(self.data * n) + __rmul__ = __mul__ + def __mod__(self, args): return self.__class__(self.data % args) + def __rmod__(self, template): return self.__class__(str(template) % self) + # the following methods are defined in alphabetical order: - def capitalize(self): return self.__class__(self.data.capitalize()) + def capitalize(self): + return self.__class__(self.data.capitalize()) + def casefold(self): return self.__class__(self.data.casefold()) + def center(self, width, *args): return self.__class__(self.data.center(width, *args)) + def count(self, sub, start=0, end=_sys.maxsize): if isinstance(sub, UserString): sub = sub.data return self.data.count(sub, start, end) + def removeprefix(self, prefix, /): if isinstance(prefix, UserString): prefix = prefix.data return self.__class__(self.data.removeprefix(prefix)) + def removesuffix(self, suffix, /): if isinstance(suffix, UserString): suffix = suffix.data return self.__class__(self.data.removesuffix(suffix)) + def encode(self, encoding='utf-8', errors='strict'): encoding = 'utf-8' if encoding is None else encoding errors = 'strict' if errors is None else errors return self.data.encode(encoding, errors) + def endswith(self, suffix, start=0, end=_sys.maxsize): return self.data.endswith(suffix, start, end) + def expandtabs(self, tabsize=8): return self.__class__(self.data.expandtabs(tabsize)) + def find(self, sub, start=0, end=_sys.maxsize): if isinstance(sub, UserString): sub = sub.data return self.data.find(sub, start, end) + def format(self, /, *args, **kwds): return self.data.format(*args, **kwds) + def format_map(self, mapping): return self.data.format_map(mapping) + def index(self, sub, start=0, end=_sys.maxsize): return self.data.index(sub, start, end) - def isalpha(self): return self.data.isalpha() - def isalnum(self): 
return self.data.isalnum() - def isascii(self): return self.data.isascii() - def isdecimal(self): return self.data.isdecimal() - def isdigit(self): return self.data.isdigit() - def isidentifier(self): return self.data.isidentifier() - def islower(self): return self.data.islower() - def isnumeric(self): return self.data.isnumeric() - def isprintable(self): return self.data.isprintable() - def isspace(self): return self.data.isspace() - def istitle(self): return self.data.istitle() - def isupper(self): return self.data.isupper() - def join(self, seq): return self.data.join(seq) + + def isalpha(self): + return self.data.isalpha() + + def isalnum(self): + return self.data.isalnum() + + def isascii(self): + return self.data.isascii() + + def isdecimal(self): + return self.data.isdecimal() + + def isdigit(self): + return self.data.isdigit() + + def isidentifier(self): + return self.data.isidentifier() + + def islower(self): + return self.data.islower() + + def isnumeric(self): + return self.data.isnumeric() + + def isprintable(self): + return self.data.isprintable() + + def isspace(self): + return self.data.isspace() + + def istitle(self): + return self.data.istitle() + + def isupper(self): + return self.data.isupper() + + def join(self, seq): + return self.data.join(seq) + def ljust(self, width, *args): return self.__class__(self.data.ljust(width, *args)) - def lower(self): return self.__class__(self.data.lower()) - def lstrip(self, chars=None): return self.__class__(self.data.lstrip(chars)) + + def lower(self): + return self.__class__(self.data.lower()) + + def lstrip(self, chars=None): + return self.__class__(self.data.lstrip(chars)) + maketrans = str.maketrans + def partition(self, sep): return self.data.partition(sep) + def replace(self, old, new, maxsplit=-1): if isinstance(old, UserString): old = old.data if isinstance(new, UserString): new = new.data return self.__class__(self.data.replace(old, new, maxsplit)) + def rfind(self, sub, start=0, end=_sys.maxsize): if isinstance(sub, UserString): sub = sub.data return self.data.rfind(sub, start, end) + def rindex(self, sub, start=0, end=_sys.maxsize): return self.data.rindex(sub, start, end) + def rjust(self, width, *args): return self.__class__(self.data.rjust(width, *args)) + def rpartition(self, sep): return self.data.rpartition(sep) + def rstrip(self, chars=None): return self.__class__(self.data.rstrip(chars)) + def split(self, sep=None, maxsplit=-1): return self.data.split(sep, maxsplit) + def rsplit(self, sep=None, maxsplit=-1): return self.data.rsplit(sep, maxsplit) - def splitlines(self, keepends=False): return self.data.splitlines(keepends) + + def splitlines(self, keepends=False): + return self.data.splitlines(keepends) + def startswith(self, prefix, start=0, end=_sys.maxsize): return self.data.startswith(prefix, start, end) - def strip(self, chars=None): return self.__class__(self.data.strip(chars)) - def swapcase(self): return self.__class__(self.data.swapcase()) - def title(self): return self.__class__(self.data.title()) + + def strip(self, chars=None): + return self.__class__(self.data.strip(chars)) + + def swapcase(self): + return self.__class__(self.data.swapcase()) + + def title(self): + return self.__class__(self.data.title()) + def translate(self, *args): return self.__class__(self.data.translate(*args)) - def upper(self): return self.__class__(self.data.upper()) - def zfill(self, width): return self.__class__(self.data.zfill(width)) + + def upper(self): + return self.__class__(self.data.upper()) + + def zfill(self, width): 
+ return self.__class__(self.data.zfill(width)) From webhook-mailer at python.org Thu Jun 11 05:39:25 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Thu, 11 Jun 2020 09:39:25 -0000 Subject: [Python-checkins] bpo-40925: Remove unused stack macro SET_VALUE (GH-20783) Message-ID: https://github.com/python/cpython/commit/33faf5c4f43e24766cf567bec89ad4c7f1491ff7 commit: 33faf5c4f43e24766cf567bec89ad4c7f1491ff7 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-11T10:39:18+01:00 summary: bpo-40925: Remove unused stack macro SET_VALUE (GH-20783) files: M Python/ceval.c diff --git a/Python/ceval.c b/Python/ceval.c index 9f2cbb06e606a..2bd7cb3ef5313 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -1157,7 +1157,6 @@ _PyEval_EvalFrameDefault(PyThreadState *tstate, PyFrameObject *f, int throwflag) #define SET_SECOND(v) (stack_pointer[-2] = (v)) #define SET_THIRD(v) (stack_pointer[-3] = (v)) #define SET_FOURTH(v) (stack_pointer[-4] = (v)) -#define SET_VALUE(n, v) (stack_pointer[-(n)] = (v)) #define BASIC_STACKADJ(n) (stack_pointer += n) #define BASIC_PUSH(v) (*stack_pointer++ = (v)) #define BASIC_POP() (*--stack_pointer) From webhook-mailer at python.org Thu Jun 11 08:45:23 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 11 Jun 2020 12:45:23 -0000 Subject: [Python-checkins] bpo-40939: Generate keyword.py using the new parser (GH-20800) Message-ID: https://github.com/python/cpython/commit/9727694f08cad4b019d2939224e3416312b1c0e1 commit: 9727694f08cad4b019d2939224e3416312b1c0e1 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-11T13:45:15+01:00 summary: bpo-40939: Generate keyword.py using the new parser (GH-20800) files: A Misc/NEWS.d/next/Library/2020-06-11-11-07-10.bpo-40939.-D5Asl.rst A Tools/peg_generator/pegen/keywordgen.py M Lib/keyword.py M Lib/pydoc.py M Makefile.pre.in M PCbuild/regen.vcxproj diff --git a/Lib/keyword.py b/Lib/keyword.py index ddcbb25d3d3f5..afc3db3942ccb 100644 --- a/Lib/keyword.py +++ b/Lib/keyword.py @@ -1,13 +1,14 @@ -"""Keywords (from "Grammar/Grammar") +"""Keywords (from "Grammar/python.gram") This file is automatically generated; please don't muck it up! To update the symbols in this file, 'cd' to the top directory of the python source tree and run: - python3 -m Parser.pgen.keywordgen Grammar/Grammar \ - Grammar/Tokens \ - Lib/keyword.py + PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \ + Grammar/Grammar \ + Grammar/Tokens \ + Lib/keyword.py Alternatively, you can run 'make regen-keyword'. 
""" @@ -18,6 +19,7 @@ 'False', 'None', 'True', + '__new_parser__', 'and', 'as', 'assert', diff --git a/Lib/pydoc.py b/Lib/pydoc.py index 628f9fc7d1d1e..a5368bf8bfe55 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1817,6 +1817,7 @@ class Helper: 'False': '', 'None': '', 'True': '', + '__new_parser__': '', 'and': 'BOOLEAN', 'as': 'with', 'assert': ('assert', ''), diff --git a/Makefile.pre.in b/Makefile.pre.in index 7c16d2905fbf4..9a82729aa0f21 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -895,9 +895,10 @@ regen-token: .PHONY: regen-keyword regen-keyword: - # Regenerate Lib/keyword.py from Grammar/Grammar and Grammar/Tokens - # using Parser/pgen - PYTHONPATH=$(srcdir) $(PYTHON_FOR_REGEN) -m Parser.pgen.keywordgen $(srcdir)/Grammar/Grammar \ + # Regenerate Lib/keyword.py from Grammar/python.gram and Grammar/Tokens + # using Tools/peg_generator/pegen + PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen.keywordgen \ + $(srcdir)/Grammar/python.gram \ $(srcdir)/Grammar/Tokens \ $(srcdir)/Lib/keyword.py.new $(UPDATE_FILE) $(srcdir)/Lib/keyword.py $(srcdir)/Lib/keyword.py.new diff --git a/Misc/NEWS.d/next/Library/2020-06-11-11-07-10.bpo-40939.-D5Asl.rst b/Misc/NEWS.d/next/Library/2020-06-11-11-07-10.bpo-40939.-D5Asl.rst new file mode 100644 index 0000000000000..0e831129dd87e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-11-11-07-10.bpo-40939.-D5Asl.rst @@ -0,0 +1 @@ +Use the new PEG parser when generating the stdlib :mod:`keyword` module. \ No newline at end of file diff --git a/PCbuild/regen.vcxproj b/PCbuild/regen.vcxproj index d46fb997dbd79..564a4dd71188c 100644 --- a/PCbuild/regen.vcxproj +++ b/PCbuild/regen.vcxproj @@ -205,8 +205,9 @@ - - + + + diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py new file mode 100644 index 0000000000000..279c34b6dae5b --- /dev/null +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -0,0 +1,73 @@ +"""Generate Lib/keyword.py from the Grammar and Tokens files using pgen""" + +import argparse + +from .build import build_parser, generate_token_definitions +from .c_generator import CParserGenerator + +TEMPLATE = r''' +"""Keywords (from "Grammar/python.gram") + +This file is automatically generated; please don't muck it up! + +To update the symbols in this file, 'cd' to the top directory of +the python source tree and run: + + PYTHONPATH=Tools/peg_generator python3 -m pegen.keywordgen \ + Grammar/Grammar \ + Grammar/Tokens \ + Lib/keyword.py + +Alternatively, you can run 'make regen-keyword'. +""" + +__all__ = ["iskeyword", "kwlist"] + +kwlist = [ + {keywords} +] + +iskeyword = frozenset(kwlist).__contains__ +'''.lstrip() + +EXTRA_KEYWORDS = ["async", "await"] + + +def main(): + parser = argparse.ArgumentParser( + description="Generate the Lib/keywords.py file from the grammar." 
+ ) + parser.add_argument( + "grammar", type=str, help="The file with the grammar definition in PEG format" + ) + parser.add_argument( + "tokens_file", + type=argparse.FileType("r"), + help="The file with the token definitions" + ) + parser.add_argument( + "keyword_file", + type=argparse.FileType("w"), + help="The path to write the keyword definitions", + ) + args = parser.parse_args() + + grammar, _, _ = build_parser(args.grammar) + with args.tokens_file as tok_file: + all_tokens, exact_tok, non_exact_tok = generate_token_definitions(tok_file) + gen: ParserGenerator = CParserGenerator( + grammar, all_tokens, exact_tok, non_exact_tok, file=None + ) + gen.collect_todo() + + with args.keyword_file as thefile: + all_keywords = sorted( + list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS + ) + + keywords = ",\n ".join(map(repr, all_keywords)) + thefile.write(TEMPLATE.format(keywords=keywords)) + + +if __name__ == "__main__": + main() From webhook-mailer at python.org Thu Jun 11 09:48:08 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Jun 2020 13:48:08 -0000 Subject: [Python-checkins] bpo-34401: Fix test_gdb for HP GDB version string (GH-20804) Message-ID: https://github.com/python/cpython/commit/b2dca49ca3769cb60713f5c2b43e5d5bbdc1f9c7 commit: b2dca49ca3769cb60713f5c2b43e5d5bbdc1f9c7 branch: master author: Victor Stinner committer: GitHub date: 2020-06-11T15:48:03+02:00 summary: bpo-34401: Fix test_gdb for HP GDB version string (GH-20804) The GDB provided by HPE on HP-UX contains a modified version string. Therefore the tests fail. Adapt the regex to match that string. Patch by Michael Osipov. Co-Authored-by: Michael Osipov files: A Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst M Lib/test/test_gdb.py diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py index 210cd0d3787a8..22c75bae98721 100644 --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -39,7 +39,8 @@ def get_gdb_version(): # 'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9 # 'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1 # 'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5 - match = re.search(r"^GNU gdb.*?\b(\d+)\.(\d+)", version) + # 'HP gdb 6.7 for HP Itanium (32 or 64 bit) and target HP-UX 11iv2 and 11iv3.\n' -> 6.7 + match = re.search(r"^(?:GNU|HP) gdb.*?\b(\d+)\.(\d+)", version) if match is None: raise Exception("unable to parse GDB version: %r" % version) return (version, int(match.group(1)), int(match.group(2))) diff --git a/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst b/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst new file mode 100644 index 0000000000000..1b28d94c056d4 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst @@ -0,0 +1 @@ +Make test_gdb properly run on HP-UX. Patch by Michael Osipov. From webhook-mailer at python.org Thu Jun 11 10:07:44 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Jun 2020 14:07:44 -0000 Subject: [Python-checkins] bpo-34401: Fix test_gdb for HP GDB version string (GH-20804) Message-ID: https://github.com/python/cpython/commit/5b8e3a533560c39eb40b2fb950d2b14caacfaf6a commit: 5b8e3a533560c39eb40b2fb950d2b14caacfaf6a branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-11T07:07:39-07:00 summary: bpo-34401: Fix test_gdb for HP GDB version string (GH-20804) The GDB provided by HPE on HP-UX contains a modified version string. Therefore the tests fail. 
Adapt the regex to match that string. Patch by Michael Osipov. Co-Authored-by: Michael Osipov (cherry picked from commit b2dca49ca3769cb60713f5c2b43e5d5bbdc1f9c7) Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst M Lib/test/test_gdb.py diff --git a/Lib/test/test_gdb.py b/Lib/test/test_gdb.py index f043c9256e02f..d90ca5a51ae1b 100644 --- a/Lib/test/test_gdb.py +++ b/Lib/test/test_gdb.py @@ -39,7 +39,8 @@ def get_gdb_version(): # 'GNU gdb (GDB) Fedora 7.9.1-17.fc22\n' -> 7.9 # 'GNU gdb 6.1.1 [FreeBSD]\n' -> 6.1 # 'GNU gdb (GDB) Fedora (7.5.1-37.fc18)\n' -> 7.5 - match = re.search(r"^GNU gdb.*?\b(\d+)\.(\d+)", version) + # 'HP gdb 6.7 for HP Itanium (32 or 64 bit) and target HP-UX 11iv2 and 11iv3.\n' -> 6.7 + match = re.search(r"^(?:GNU|HP) gdb.*?\b(\d+)\.(\d+)", version) if match is None: raise Exception("unable to parse GDB version: %r" % version) return (version, int(match.group(1)), int(match.group(2))) diff --git a/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst b/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst new file mode 100644 index 0000000000000..1b28d94c056d4 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst @@ -0,0 +1 @@ +Make test_gdb properly run on HP-UX. Patch by Michael Osipov. From webhook-mailer at python.org Thu Jun 11 11:29:02 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Jun 2020 15:29:02 -0000 Subject: [Python-checkins] bpo-40947: getpath.c uses PyConfig.platlibdir (GH-20807) Message-ID: https://github.com/python/cpython/commit/d72b9644a3e6eec83be48b1ebc2ec6ca776134d3 commit: d72b9644a3e6eec83be48b1ebc2ec6ca776134d3 branch: master author: Victor Stinner committer: GitHub date: 2020-06-11T17:28:52+02:00 summary: bpo-40947: getpath.c uses PyConfig.platlibdir (GH-20807) Followup of bpo-40854, there is one remaining usage of PLATLIBDIR which should be replaced by config->platlibdir. test_sys checks that sys.platlibdir attribute exists and is a string. Update Makefile: getpath.c and sysmodule.c no longer need PLATLIBDIR macro, PyConfig.platlibdir member is used instead. 
Co-authored-by: Sandro Mani files: A Misc/NEWS.d/next/Core and Builtins/2020-06-11-16-06-49.bpo-40947.72cZcR.rst M Lib/test/test_sys.py M Makefile.pre.in M Modules/getpath.c diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 2f93eaae560db..194128e5c6bf2 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -487,6 +487,7 @@ def test_attributes(self): self.assertIsInstance(sys.platform, str) self.assertIsInstance(sys.prefix, str) self.assertIsInstance(sys.base_prefix, str) + self.assertIsInstance(sys.platlibdir, str) self.assertIsInstance(sys.version, str) vi = sys.version_info self.assertIsInstance(vi[:], tuple) diff --git a/Makefile.pre.in b/Makefile.pre.in index 9a82729aa0f21..5972dc7b49522 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -775,7 +775,6 @@ Modules/getpath.o: $(srcdir)/Modules/getpath.c Makefile -DEXEC_PREFIX='"$(exec_prefix)"' \ -DVERSION='"$(VERSION)"' \ -DVPATH='"$(VPATH)"' \ - -DPLATLIBDIR='"$(PLATLIBDIR)"' \ -o $@ $(srcdir)/Modules/getpath.c Programs/python.o: $(srcdir)/Programs/python.c @@ -807,7 +806,6 @@ Python/dynload_hpux.o: $(srcdir)/Python/dynload_hpux.c Makefile Python/sysmodule.o: $(srcdir)/Python/sysmodule.c Makefile $(srcdir)/Include/pydtrace.h $(CC) -c $(PY_CORE_CFLAGS) \ -DABIFLAGS='"$(ABIFLAGS)"' \ - -DPLATLIBDIR='"$(PLATLIBDIR)"' \ $(MULTIARCH_CPPFLAGS) \ -o $@ $(srcdir)/Python/sysmodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-11-16-06-49.bpo-40947.72cZcR.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-11-16-06-49.bpo-40947.72cZcR.rst new file mode 100644 index 0000000000000..e7dfe06531a4d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-11-16-06-49.bpo-40947.72cZcR.rst @@ -0,0 +1,2 @@ +The Python :ref:`Path Configuration ` now takes +:c:member:`PyConfig.platlibdir` in account. diff --git a/Modules/getpath.c b/Modules/getpath.c index 469c9ca010640..f7a6dd4044305 100644 --- a/Modules/getpath.c +++ b/Modules/getpath.c @@ -130,7 +130,7 @@ typedef struct { wchar_t *exec_prefix_macro; /* EXEC_PREFIX macro */ wchar_t *vpath_macro; /* VPATH macro */ - wchar_t *lib_python; /* "lib/pythonX.Y" */ + wchar_t *lib_python; /* / "pythonX.Y" */ int prefix_found; /* found platform independent libraries? */ int exec_prefix_found; /* found the platform dependent libraries? 
*/ @@ -810,7 +810,7 @@ calculate_exec_prefix(PyCalculatePath *calculate, _PyPathConfig *pathconfig) "Could not find platform dependent libraries \n"); } - /* / "lib-dynload" */ + /* / "lib-dynload" */ wchar_t *lib_dynload = joinpath2(calculate->platlibdir, L"lib-dynload"); if (lib_dynload == NULL) { @@ -1296,8 +1296,10 @@ calculate_zip_path(PyCalculatePath *calculate) { PyStatus res; - /* Path: / "pythonXY.zip" */ - wchar_t *path = joinpath2(calculate->platlibdir, L"python" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) L".zip"); + /* Path: / "pythonXY.zip" */ + wchar_t *path = joinpath2(calculate->platlibdir, + L"python" Py_STRINGIFY(PY_MAJOR_VERSION) Py_STRINGIFY(PY_MINOR_VERSION) + L".zip"); if (path == NULL) { return _PyStatus_NO_MEMORY(); } @@ -1305,7 +1307,7 @@ calculate_zip_path(PyCalculatePath *calculate) if (calculate->prefix_found > 0) { /* Use the reduced prefix returned by Py_GetPrefix() - Path: / / "pythonXY.zip" */ + Path: / / "pythonXY.zip" */ wchar_t *parent = _PyMem_RawWcsdup(calculate->prefix); if (parent == NULL) { res = _PyStatus_NO_MEMORY(); @@ -1426,6 +1428,11 @@ static PyStatus calculate_init(PyCalculatePath *calculate, const PyConfig *config) { size_t len; + + calculate->warnings = config->pathconfig_warnings; + calculate->pythonpath_env = config->pythonpath_env; + calculate->platlibdir = config->platlibdir; + const char *path = getenv("PATH"); if (path) { calculate->path_env = Py_DecodeLocale(path, &len); @@ -1452,14 +1459,16 @@ calculate_init(PyCalculatePath *calculate, const PyConfig *config) return DECODE_LOCALE_ERR("VPATH macro", len); } - calculate->lib_python = Py_DecodeLocale(PLATLIBDIR "/python" VERSION, &len); - if (!calculate->lib_python) { + // / "pythonX.Y" + wchar_t *pyversion = Py_DecodeLocale("python" VERSION, &len); + if (!pyversion) { return DECODE_LOCALE_ERR("VERSION macro", len); } - - calculate->warnings = config->pathconfig_warnings; - calculate->pythonpath_env = config->pythonpath_env; - calculate->platlibdir = config->platlibdir; + calculate->lib_python = joinpath2(config->platlibdir, pyversion); + PyMem_RawFree(pyversion); + if (calculate->lib_python == NULL) { + return _PyStatus_NO_MEMORY(); + } return _PyStatus_OK(); } From webhook-mailer at python.org Thu Jun 11 11:31:05 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Jun 2020 15:31:05 -0000 Subject: [Python-checkins] bpo-40275: test.supports imports lazily fnmatch, glob, struct (GH-20810) Message-ID: https://github.com/python/cpython/commit/bdfe9b633a171522bc6cdb7c8aa1215cbd119b59 commit: bdfe9b633a171522bc6cdb7c8aa1215cbd119b59 branch: master author: Victor Stinner committer: GitHub date: 2020-06-11T17:30:57+02:00 summary: bpo-40275: test.supports imports lazily fnmatch, glob, struct (GH-20810) files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 3778eed62169c..1ac65533a7b54 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -5,13 +5,10 @@ import contextlib import errno -import fnmatch import functools -import glob import os import re import stat -import struct import sys import sysconfig import time @@ -846,9 +843,11 @@ def python_is_optimized(): _vheader = _header + 'n' def calcobjsize(fmt): + import struct return struct.calcsize(_header + fmt + _align) def calcvobjsize(fmt): + import struct return struct.calcsize(_vheader + fmt + _align) @@ -1255,6 +1254,7 @@ def _compile_match_function(patterns): # The test.bisect_cmd utility only uses such full test 
identifiers. func = set(patterns).__contains__ else: + import fnmatch regex = '|'.join(map(fnmatch.translate, patterns)) # The search *is* case sensitive on purpose: # don't use flags=re.IGNORECASE @@ -1534,6 +1534,7 @@ def _platform_specific(self): if sys.platform == "win32": def _platform_specific(self): + import glob import _winapi if os.path.lexists(self.real) and not os.path.exists(self.real): From webhook-mailer at python.org Thu Jun 11 11:36:11 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 11 Jun 2020 15:36:11 -0000 Subject: [Python-checkins] bpo-40275: Add warnings_helper submodule in test.support (GH-20797) Message-ID: https://github.com/python/cpython/commit/10e6506aa8261aacc89b49e629ae1c927fa5151c commit: 10e6506aa8261aacc89b49e629ae1c927fa5151c branch: master author: Hai Shi committer: GitHub date: 2020-06-11T17:36:06+02:00 summary: bpo-40275: Add warnings_helper submodule in test.support (GH-20797) files: A Lib/test/support/warnings_helper.py M Doc/library/test.rst M Lib/test/support/__init__.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index a18197aed3f4a..843201885ad24 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -497,79 +497,6 @@ The :mod:`test.support` module defines the following functions: check_impl_detail(cpython=False) # Everywhere except CPython. -.. function:: check_warnings(\*filters, quiet=True) - - A convenience wrapper for :func:`warnings.catch_warnings()` that makes it - easier to test that a warning was correctly raised. It is approximately - equivalent to calling ``warnings.catch_warnings(record=True)`` with - :meth:`warnings.simplefilter` set to ``always`` and with the option to - automatically validate the results that are recorded. - - ``check_warnings`` accepts 2-tuples of the form ``("message regexp", - WarningCategory)`` as positional arguments. If one or more *filters* are - provided, or if the optional keyword argument *quiet* is ``False``, - it checks to make sure the warnings are as expected: each specified filter - must match at least one of the warnings raised by the enclosed code or the - test fails, and if any warnings are raised that do not match any of the - specified filters the test fails. To disable the first of these checks, - set *quiet* to ``True``. - - If no arguments are specified, it defaults to:: - - check_warnings(("", Warning), quiet=True) - - In this case all warnings are caught and no errors are raised. - - On entry to the context manager, a :class:`WarningRecorder` instance is - returned. The underlying warnings list from - :func:`~warnings.catch_warnings` is available via the recorder object's - :attr:`warnings` attribute. As a convenience, the attributes of the object - representing the most recent warning can also be accessed directly through - the recorder object (see example below). If no warning has been raised, - then any of the attributes that would otherwise be expected on an object - representing a warning will return ``None``. - - The recorder object also has a :meth:`reset` method, which clears the - warnings list. - - The context manager is designed to be used like this:: - - with check_warnings(("assertion is always true", SyntaxWarning), - ("", UserWarning)): - exec('assert(False, "Hey!")') - warnings.warn(UserWarning("Hide me!")) - - In this case if either warning was not raised, or some other warning was - raised, :func:`check_warnings` would raise an error. 
- - When a test needs to look more deeply into the warnings, rather than - just checking whether or not they occurred, code like this can be used:: - - with check_warnings(quiet=True) as w: - warnings.warn("foo") - assert str(w.args[0]) == "foo" - warnings.warn("bar") - assert str(w.args[0]) == "bar" - assert str(w.warnings[0].args[0]) == "foo" - assert str(w.warnings[1].args[0]) == "bar" - w.reset() - assert len(w.warnings) == 0 - - - Here all warnings will be caught, and the test code tests the captured - warnings directly. - - .. versionchanged:: 3.2 - New optional arguments *filters* and *quiet*. - - -.. function:: check_no_resource_warning(testcase) - - Context manager to check that no :exc:`ResourceWarning` was raised. You - must remove the object which may emit :exc:`ResourceWarning` before the - end of the context manager. - - .. function:: set_memlimit(limit) Set the values for :data:`max_memuse` and :data:`real_max_memuse` for big @@ -851,20 +778,6 @@ The :mod:`test.support` module defines the following functions: the offset of the exception. -.. function:: check_syntax_warning(testcase, statement, errtext='', *, lineno=1, offset=None) - - Test for syntax warning in *statement* by attempting to compile *statement*. - Test also that the :exc:`SyntaxWarning` is emitted only once, and that it - will be converted to a :exc:`SyntaxError` when turned into error. - *testcase* is the :mod:`unittest` instance for the test. *errtext* is the - regular expression which should match the string representation of the - emitted :exc:`SyntaxWarning` and raised :exc:`SyntaxError`. If *lineno* - is not ``None``, compares to the line of the warning and exception. - If *offset* is not ``None``, compares to the offset of the exception. - - .. versionadded:: 3.8 - - .. function:: open_urlresource(url, *args, **kw) Open *url*. If open fails, raises :exc:`TestFailed`. @@ -1051,12 +964,6 @@ The :mod:`test.support` module defines the following classes: Try to match a single stored value (*dv*) with a supplied value (*v*). -.. class:: WarningsRecorder() - - Class used to record warnings for unit tests. See documentation of - :func:`check_warnings` above for more details. - - .. class:: BasicTestRunner() .. method:: run(test) @@ -1659,3 +1566,105 @@ The :mod:`test.support.import_helper` module provides support for import tests. will be reverted at the end of the block. +:mod:`test.support.warnings_helper` --- Utilities for warnings tests +==================================================================== + +.. module:: test.support.warnings_helper + :synopsis: Support for warnings tests. + +The :mod:`test.support.warnings_helper` module provides support for warnings tests. + +.. versionadded:: 3.10 + + +.. function:: check_no_resource_warning(testcase) + + Context manager to check that no :exc:`ResourceWarning` was raised. You + must remove the object which may emit :exc:`ResourceWarning` before the + end of the context manager. + + +.. function:: check_syntax_warning(testcase, statement, errtext='', *, lineno=1, offset=None) + + Test for syntax warning in *statement* by attempting to compile *statement*. + Test also that the :exc:`SyntaxWarning` is emitted only once, and that it + will be converted to a :exc:`SyntaxError` when turned into error. + *testcase* is the :mod:`unittest` instance for the test. *errtext* is the + regular expression which should match the string representation of the + emitted :exc:`SyntaxWarning` and raised :exc:`SyntaxError`. 
If *lineno* + is not ``None``, compares to the line of the warning and exception. + If *offset* is not ``None``, compares to the offset of the exception. + + .. versionadded:: 3.8 + + +.. function:: check_warnings(\*filters, quiet=True) + + A convenience wrapper for :func:`warnings.catch_warnings()` that makes it + easier to test that a warning was correctly raised. It is approximately + equivalent to calling ``warnings.catch_warnings(record=True)`` with + :meth:`warnings.simplefilter` set to ``always`` and with the option to + automatically validate the results that are recorded. + + ``check_warnings`` accepts 2-tuples of the form ``("message regexp", + WarningCategory)`` as positional arguments. If one or more *filters* are + provided, or if the optional keyword argument *quiet* is ``False``, + it checks to make sure the warnings are as expected: each specified filter + must match at least one of the warnings raised by the enclosed code or the + test fails, and if any warnings are raised that do not match any of the + specified filters the test fails. To disable the first of these checks, + set *quiet* to ``True``. + + If no arguments are specified, it defaults to:: + + check_warnings(("", Warning), quiet=True) + + In this case all warnings are caught and no errors are raised. + + On entry to the context manager, a :class:`WarningRecorder` instance is + returned. The underlying warnings list from + :func:`~warnings.catch_warnings` is available via the recorder object's + :attr:`warnings` attribute. As a convenience, the attributes of the object + representing the most recent warning can also be accessed directly through + the recorder object (see example below). If no warning has been raised, + then any of the attributes that would otherwise be expected on an object + representing a warning will return ``None``. + + The recorder object also has a :meth:`reset` method, which clears the + warnings list. + + The context manager is designed to be used like this:: + + with check_warnings(("assertion is always true", SyntaxWarning), + ("", UserWarning)): + exec('assert(False, "Hey!")') + warnings.warn(UserWarning("Hide me!")) + + In this case if either warning was not raised, or some other warning was + raised, :func:`check_warnings` would raise an error. + + When a test needs to look more deeply into the warnings, rather than + just checking whether or not they occurred, code like this can be used:: + + with check_warnings(quiet=True) as w: + warnings.warn("foo") + assert str(w.args[0]) == "foo" + warnings.warn("bar") + assert str(w.args[0]) == "bar" + assert str(w.warnings[0].args[0]) == "foo" + assert str(w.warnings[1].args[0]) == "bar" + w.reset() + assert len(w.warnings) == 0 + + + Here all warnings will be caught, and the test code tests the captured + warnings directly. + + .. versionchanged:: 3.2 + New optional arguments *filters* and *quiet*. + + +.. class:: WarningsRecorder() + + Class used to record warnings for unit tests. See documentation of + :func:`check_warnings` above for more details. 
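The relocated documentation above already shows how check_warnings is used; as a minimal sketch of the new import location introduced by this commit (assuming a CPython checkout where the test package is installed, and noting that test.support keeps re-exporting the same names), a test module can now pull the helper directly from test.support.warnings_helper:

    # Sketch only, not code from this commit: check_warnings used from its
    # new submodule test.support.warnings_helper.
    import unittest
    import warnings

    from test.support.warnings_helper import check_warnings

    class WarningsHelperExample(unittest.TestCase):
        def test_user_warning_is_caught(self):
            # The ("hide me", UserWarning) filter must match at least one
            # warning raised in the block, otherwise check_warnings errors out.
            with check_warnings(("hide me", UserWarning), quiet=False) as recorder:
                warnings.warn(UserWarning("hide me"))
                self.assertEqual(str(recorder.warnings[0].message), "hide me")

    if __name__ == "__main__":
        unittest.main()
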
diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 1ac65533a7b54..fa54ebe5620f4 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -14,7 +14,6 @@ import time import types import unittest -import warnings from .import_helper import ( CleanImport, DirsOnSysPath, _ignore_deprecated_imports, @@ -30,6 +29,10 @@ rmtree, skip_unless_symlink, skip_unless_xattr, temp_cwd, temp_dir, temp_umask, unlink, EnvironmentVarGuard, FakePath, _longpath) +from .warnings_helper import ( + WarningsRecorder, _filterwarnings, + check_no_resource_warning, check_no_warnings, + check_syntax_warning, check_warnings, ignore_warnings) from .testresult import get_test_runner @@ -45,7 +48,7 @@ # unittest "is_resource_enabled", "requires", "requires_freebsd_version", "requires_linux_version", "requires_mac_ver", - "check_syntax_error", "check_syntax_warning", + "check_syntax_error", "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", "BasicTestRunner", "run_unittest", "run_doctest", "requires_gzip", "requires_bz2", "requires_lzma", @@ -53,7 +56,6 @@ "requires_IEEE_754", "requires_zlib", "anticipate_failure", "load_package_tests", "detect_api_mismatch", "check__all__", "skip_if_buggy_ucrt_strfptime", - "ignore_warnings", # sys "is_jython", "is_android", "check_impl_detail", "unix_shell", "setswitchinterval", @@ -62,7 +64,6 @@ # processes "reap_children", # miscellaneous - "check_warnings", "check_no_resource_warning", "check_no_warnings", "run_with_locale", "swap_item", "findfile", "swap_attr", "Matcher", "set_memlimit", "SuppressCrashReport", "sortdict", "run_with_tz", "PGO", "missing_compiler_executable", @@ -128,22 +129,6 @@ class ResourceDenied(unittest.SkipTest): and unexpected skips. """ -def ignore_warnings(*, category): - """Decorator to suppress deprecation warnings. - - Use of context managers to hide warnings make diffs - more noisy and tools like 'git blame' less useful. - """ - def decorator(test): - @functools.wraps(test) - def wrapper(self, *args, **kwargs): - with warnings.catch_warnings(): - warnings.simplefilter('ignore', category=category) - return test(self, *args, **kwargs) - return wrapper - return decorator - - def anticipate_failure(condition): """Decorator to mark a test that is known to be broken in some cases @@ -511,32 +496,6 @@ def check_syntax_error(testcase, statement, errtext='', *, lineno=None, offset=N if offset is not None: testcase.assertEqual(err.offset, offset) -def check_syntax_warning(testcase, statement, errtext='', *, lineno=1, offset=None): - # Test also that a warning is emitted only once. - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('always', SyntaxWarning) - compile(statement, '', 'exec') - testcase.assertEqual(len(warns), 1, warns) - - warn, = warns - testcase.assertTrue(issubclass(warn.category, SyntaxWarning), warn.category) - if errtext: - testcase.assertRegex(str(warn.message), errtext) - testcase.assertEqual(warn.filename, '') - testcase.assertIsNotNone(warn.lineno) - if lineno is not None: - testcase.assertEqual(warn.lineno, lineno) - - # SyntaxWarning should be converted to SyntaxError when raised, - # since the latter contains more information and provides better - # error report. - with warnings.catch_warnings(record=True) as warns: - warnings.simplefilter('error', SyntaxWarning) - check_syntax_error(testcase, statement, errtext, - lineno=lineno, offset=offset) - # No warnings are leaked when a SyntaxError is raised. 
- testcase.assertEqual(warns, []) - def open_urlresource(url, *args, **kw): import urllib.request, urllib.parse @@ -592,134 +551,6 @@ def check_valid_file(fn): raise TestFailed('invalid resource %r' % fn) -class WarningsRecorder(object): - """Convenience wrapper for the warnings list returned on - entry to the warnings.catch_warnings() context manager. - """ - def __init__(self, warnings_list): - self._warnings = warnings_list - self._last = 0 - - def __getattr__(self, attr): - if len(self._warnings) > self._last: - return getattr(self._warnings[-1], attr) - elif attr in warnings.WarningMessage._WARNING_DETAILS: - return None - raise AttributeError("%r has no attribute %r" % (self, attr)) - - @property - def warnings(self): - return self._warnings[self._last:] - - def reset(self): - self._last = len(self._warnings) - - -def _filterwarnings(filters, quiet=False): - """Catch the warnings, then check if all the expected - warnings have been raised and re-raise unexpected warnings. - If 'quiet' is True, only re-raise the unexpected warnings. - """ - # Clear the warning registry of the calling module - # in order to re-raise the warnings. - frame = sys._getframe(2) - registry = frame.f_globals.get('__warningregistry__') - if registry: - registry.clear() - with warnings.catch_warnings(record=True) as w: - # Set filter "always" to record all warnings. Because - # test_warnings swap the module, we need to look up in - # the sys.modules dictionary. - sys.modules['warnings'].simplefilter("always") - yield WarningsRecorder(w) - # Filter the recorded warnings - reraise = list(w) - missing = [] - for msg, cat in filters: - seen = False - for w in reraise[:]: - warning = w.message - # Filter out the matching messages - if (re.match(msg, str(warning), re.I) and - issubclass(warning.__class__, cat)): - seen = True - reraise.remove(w) - if not seen and not quiet: - # This filter caught nothing - missing.append((msg, cat.__name__)) - if reraise: - raise AssertionError("unhandled warning %s" % reraise[0]) - if missing: - raise AssertionError("filter (%r, %s) did not catch any warning" % - missing[0]) - - - at contextlib.contextmanager -def check_warnings(*filters, **kwargs): - """Context manager to silence warnings. - - Accept 2-tuples as positional arguments: - ("message regexp", WarningCategory) - - Optional argument: - - if 'quiet' is True, it does not fail if a filter catches nothing - (default True without argument, - default False if some filters are defined) - - Without argument, it defaults to: - check_warnings(("", Warning), quiet=True) - """ - quiet = kwargs.get('quiet') - if not filters: - filters = (("", Warning),) - # Preserve backward compatibility - if quiet is None: - quiet = True - return _filterwarnings(filters, quiet) - - - at contextlib.contextmanager -def check_no_warnings(testcase, message='', category=Warning, force_gc=False): - """Context manager to check that no warnings are emitted. - - This context manager enables a given warning within its scope - and checks that no warnings are emitted even with that warning - enabled. - - If force_gc is True, a garbage collection is attempted before checking - for warnings. This may help to catch warnings emitted when objects - are deleted, such as ResourceWarning. - - Other keyword arguments are passed to warnings.filterwarnings(). 
- """ - with warnings.catch_warnings(record=True) as warns: - warnings.filterwarnings('always', - message=message, - category=category) - yield - if force_gc: - gc_collect() - testcase.assertEqual(warns, []) - - - at contextlib.contextmanager -def check_no_resource_warning(testcase): - """Context manager to check that no ResourceWarning is emitted. - - Usage: - - with check_no_resource_warning(self): - f = open(...) - ... - del f - - You must remove the object which may emit ResourceWarning before - the end of the context manager. - """ - with check_no_warnings(testcase, category=ResourceWarning, force_gc=True): - yield - - class TransientResource(object): """Raise ResourceDenied if an exception is raised while the context manager @@ -978,6 +809,7 @@ def __init__(self): self.started = False def start(self): + import warnings try: f = open(self.procfile, 'r') except OSError as e: diff --git a/Lib/test/support/warnings_helper.py b/Lib/test/support/warnings_helper.py new file mode 100644 index 0000000000000..c9f9045405b80 --- /dev/null +++ b/Lib/test/support/warnings_helper.py @@ -0,0 +1,180 @@ +import contextlib +import functools +import re +import sys +import warnings + + +def check_syntax_warning(testcase, statement, errtext='', + *, lineno=1, offset=None): + # Test also that a warning is emitted only once. + from test.support import check_syntax_error + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter('always', SyntaxWarning) + compile(statement, '', 'exec') + testcase.assertEqual(len(warns), 1, warns) + + warn, = warns + testcase.assertTrue(issubclass(warn.category, SyntaxWarning), + warn.category) + if errtext: + testcase.assertRegex(str(warn.message), errtext) + testcase.assertEqual(warn.filename, '') + testcase.assertIsNotNone(warn.lineno) + if lineno is not None: + testcase.assertEqual(warn.lineno, lineno) + + # SyntaxWarning should be converted to SyntaxError when raised, + # since the latter contains more information and provides better + # error report. + with warnings.catch_warnings(record=True) as warns: + warnings.simplefilter('error', SyntaxWarning) + check_syntax_error(testcase, statement, errtext, + lineno=lineno, offset=offset) + # No warnings are leaked when a SyntaxError is raised. + testcase.assertEqual(warns, []) + + +def ignore_warnings(*, category): + """Decorator to suppress deprecation warnings. + + Use of context managers to hide warnings make diffs + more noisy and tools like 'git blame' less useful. + """ + def decorator(test): + @functools.wraps(test) + def wrapper(self, *args, **kwargs): + with warnings.catch_warnings(): + warnings.simplefilter('ignore', category=category) + return test(self, *args, **kwargs) + return wrapper + return decorator + + +class WarningsRecorder(object): + """Convenience wrapper for the warnings list returned on + entry to the warnings.catch_warnings() context manager. + """ + def __init__(self, warnings_list): + self._warnings = warnings_list + self._last = 0 + + def __getattr__(self, attr): + if len(self._warnings) > self._last: + return getattr(self._warnings[-1], attr) + elif attr in warnings.WarningMessage._WARNING_DETAILS: + return None + raise AttributeError("%r has no attribute %r" % (self, attr)) + + @property + def warnings(self): + return self._warnings[self._last:] + + def reset(self): + self._last = len(self._warnings) + + + at contextlib.contextmanager +def check_warnings(*filters, **kwargs): + """Context manager to silence warnings. 
+ + Accept 2-tuples as positional arguments: + ("message regexp", WarningCategory) + + Optional argument: + - if 'quiet' is True, it does not fail if a filter catches nothing + (default True without argument, + default False if some filters are defined) + + Without argument, it defaults to: + check_warnings(("", Warning), quiet=True) + """ + quiet = kwargs.get('quiet') + if not filters: + filters = (("", Warning),) + # Preserve backward compatibility + if quiet is None: + quiet = True + return _filterwarnings(filters, quiet) + + + at contextlib.contextmanager +def check_no_warnings(testcase, message='', category=Warning, force_gc=False): + """Context manager to check that no warnings are emitted. + + This context manager enables a given warning within its scope + and checks that no warnings are emitted even with that warning + enabled. + + If force_gc is True, a garbage collection is attempted before checking + for warnings. This may help to catch warnings emitted when objects + are deleted, such as ResourceWarning. + + Other keyword arguments are passed to warnings.filterwarnings(). + """ + from test.support import gc_collect + with warnings.catch_warnings(record=True) as warns: + warnings.filterwarnings('always', + message=message, + category=category) + yield + if force_gc: + gc_collect() + testcase.assertEqual(warns, []) + + + at contextlib.contextmanager +def check_no_resource_warning(testcase): + """Context manager to check that no ResourceWarning is emitted. + + Usage: + + with check_no_resource_warning(self): + f = open(...) + ... + del f + + You must remove the object which may emit ResourceWarning before + the end of the context manager. + """ + with check_no_warnings(testcase, category=ResourceWarning, force_gc=True): + yield + + +def _filterwarnings(filters, quiet=False): + """Catch the warnings, then check if all the expected + warnings have been raised and re-raise unexpected warnings. + If 'quiet' is True, only re-raise the unexpected warnings. + """ + # Clear the warning registry of the calling module + # in order to re-raise the warnings. + frame = sys._getframe(2) + registry = frame.f_globals.get('__warningregistry__') + if registry: + registry.clear() + with warnings.catch_warnings(record=True) as w: + # Set filter "always" to record all warnings. Because + # test_warnings swap the module, we need to look up in + # the sys.modules dictionary. 
+ sys.modules['warnings'].simplefilter("always") + yield WarningsRecorder(w) + # Filter the recorded warnings + reraise = list(w) + missing = [] + for msg, cat in filters: + seen = False + for w in reraise[:]: + warning = w.message + # Filter out the matching messages + if (re.match(msg, str(warning), re.I) and + issubclass(warning.__class__, cat)): + seen = True + reraise.remove(w) + if not seen and not quiet: + # This filter caught nothing + missing.append((msg, cat.__name__)) + if reraise: + raise AssertionError("unhandled warning %s" % reraise[0]) + if missing: + raise AssertionError("filter (%r, %s) did not catch any warning" % + missing[0]) From webhook-mailer at python.org Thu Jun 11 12:09:25 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 11 Jun 2020 16:09:25 -0000 Subject: [Python-checkins] bpo-40939: Remove PEG parser easter egg (__new_parser__) (#20802) Message-ID: https://github.com/python/cpython/commit/bcd7deed9118e365c1225de2a2e1a81bf988c6ab commit: bcd7deed9118e365c1225de2a2e1a81bf988c6ab branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-11T09:09:21-07:00 summary: bpo-40939: Remove PEG parser easter egg (__new_parser__) (#20802) It no longer serves a purpose (there's only one parser) and having "new" in any name will eventually look odd. Also, it impinges on a potential sub-namespace, `__new_...__`. files: M Grammar/python.gram M Lib/keyword.py M Lib/pydoc.py M Parser/pegen/parse.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 2c350ef68a214..745c14ebb9803 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -477,7 +477,6 @@ atom[expr_ty]: | 'True' { _Py_Constant(Py_True, NULL, EXTRA) } | 'False' { _Py_Constant(Py_False, NULL, EXTRA) } | 'None' { _Py_Constant(Py_None, NULL, EXTRA) } - | '__new_parser__' { RAISE_SYNTAX_ERROR("You found it!") } | &STRING strings | NUMBER | &'(' (tuple | group | genexp) diff --git a/Lib/keyword.py b/Lib/keyword.py index afc3db3942ccb..b6a9982570211 100644 --- a/Lib/keyword.py +++ b/Lib/keyword.py @@ -19,7 +19,6 @@ 'False', 'None', 'True', - '__new_parser__', 'and', 'as', 'assert', diff --git a/Lib/pydoc.py b/Lib/pydoc.py index a5368bf8bfe55..628f9fc7d1d1e 100755 --- a/Lib/pydoc.py +++ b/Lib/pydoc.py @@ -1817,7 +1817,6 @@ class Helper: 'False': '', 'None': '', 'True': '', - '__new_parser__': '', 'and': 'BOOLEAN', 'as': 'with', 'assert': ('assert', ''), diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c index 4f13bf772f261..d28e6c83aadb0 100644 --- a/Parser/pegen/parse.c +++ b/Parser/pegen/parse.c @@ -7,7 +7,7 @@ extern int Py_DebugFlag; #else #define D(x) #endif -static const int n_keyword_lists = 15; +static const int n_keyword_lists = 9; static KeywordToken *reserved_keywords[] = { NULL, NULL, @@ -15,8 +15,8 @@ static KeywordToken *reserved_keywords[] = { {"if", 510}, {"in", 518}, {"is", 526}, - {"as", 531}, - {"or", 532}, + {"as", 530}, + {"or", 531}, {NULL, -1}, }, (KeywordToken[]) { @@ -25,7 +25,7 @@ static KeywordToken *reserved_keywords[] = { {"for", 517}, {"def", 522}, {"not", 525}, - {"and", 533}, + {"and", 532}, {NULL, -1}, }, (KeywordToken[]) { @@ -65,15 +65,6 @@ static KeywordToken *reserved_keywords[] = { {"nonlocal", 509}, {NULL, -1}, }, - NULL, - NULL, - NULL, - NULL, - NULL, - (KeywordToken[]) { - {"__new_parser__", 530}, - {NULL, -1}, - }, }; #define file_type 1000 #define interactive_type 1001 @@ -10567,7 +10558,6 @@ slice_rule(Parser *p) // | 'True' // | 'False' // | 'None' -// | '__new_parser__' // | &STRING strings // | NUMBER // | &'(' (tuple | 
group | genexp) @@ -10711,30 +10701,6 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'None'")); } - { // '__new_parser__' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'__new_parser__'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='__new_parser__' - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'__new_parser__'")); - _res = RAISE_SYNTAX_ERROR ( "You found it!" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'__new_parser__'")); - } { // &STRING strings if (p->error_indicator) { D(p->level--); @@ -17313,7 +17279,7 @@ _tmp_34_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -17471,7 +17437,7 @@ _tmp_37_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -17971,7 +17937,7 @@ _tmp_46_rule(Parser *p) Token * _keyword; expr_ty t; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' && (t = target_rule(p)) // target ) @@ -18086,7 +18052,7 @@ _tmp_48_rule(Parser *p) Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='as' + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) @@ -23892,7 +23858,7 @@ _tmp_144_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 532)) // token='or' + (_keyword = _PyPegen_expect_token(p, 531)) // token='or' && (c = conjunction_rule(p)) // conjunction ) @@ -23936,7 +23902,7 @@ _tmp_145_rule(Parser *p) Token * _keyword; expr_ty c; if ( - (_keyword = _PyPegen_expect_token(p, 533)) // token='and' + (_keyword = _PyPegen_expect_token(p, 532)) // token='and' && (c = inversion_rule(p)) // inversion ) From webhook-mailer at python.org Thu Jun 11 12:26:29 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 11 Jun 2020 16:26:29 -0000 Subject: [Python-checkins] bpo-40275: Move TransientResource to test_urllib2net (GH-20812) Message-ID: https://github.com/python/cpython/commit/311110abcd8ab648dbf1803e36a8ba5d93fa019b commit: 311110abcd8ab648dbf1803e36a8ba5d93fa019b branch: master author: Victor Stinner committer: GitHub date: 2020-06-11T18:26:23+02:00 summary: bpo-40275: Move TransientResource to test_urllib2net (GH-20812) Move TransientResource, time_out, socket_peer_reset and ioerror_peer_reset from test.support to test_urllib2net. Remove "import errno" from test.support. 
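As a minimal sketch of what the relocated helper does (an illustration assuming the post-commit tree, not code taken from the change itself): TransientResource converts a matching exception raised inside the block into ResourceDenied, so a transient network failure is reported as an unavailable optional resource rather than a test error.

    # Sketch only: exercising TransientResource from its new home in
    # Lib/test/test_urllib2net.py; assumes the test package is installed.
    import errno

    from test.support import ResourceDenied
    from test.test_urllib2net import TransientResource

    # Local stand-in mirroring the module-level time_out helper added below.
    timeout_guard = TransientResource(OSError, errno=errno.ETIMEDOUT)

    try:
        with timeout_guard:
            # Stand-in for a flaky network read that times out.
            raise OSError(errno.ETIMEDOUT, "connection timed out")
    except ResourceDenied:
        print("timeout reported as ResourceDenied (optional resource unavailable)")
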
files: M Doc/library/test.rst M Lib/test/support/__init__.py M Lib/test/test_support.py M Lib/test/test_urllib2net.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index 843201885ad24..b39b601fb64f6 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -922,15 +922,6 @@ The :mod:`test.support` module defines the following functions: The :mod:`test.support` module defines the following classes: -.. class:: TransientResource(exc, **kwargs) - - Instances are a context manager that raises :exc:`ResourceDenied` if the - specified exception type is raised. Any keyword arguments are treated as - attribute/value pairs to be compared against any exception raised within the - :keyword:`with` statement. Only if all pairs match properly against - attributes on the exception is :exc:`ResourceDenied` raised. - - .. class:: SuppressCrashReport() A context manager used to try to prevent crash dialog popups on tests that diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index fa54ebe5620f4..f078ad780a0d4 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -4,7 +4,6 @@ raise ImportError('support must be imported from the test package') import contextlib -import errno import functools import os import re @@ -49,7 +48,6 @@ "is_resource_enabled", "requires", "requires_freebsd_version", "requires_linux_version", "requires_mac_ver", "check_syntax_error", - "TransientResource", "time_out", "socket_peer_reset", "ioerror_peer_reset", "BasicTestRunner", "run_unittest", "run_doctest", "requires_gzip", "requires_bz2", "requires_lzma", "bigmemtest", "bigaddrspacetest", "cpython_only", "get_attribute", @@ -551,39 +549,6 @@ def check_valid_file(fn): raise TestFailed('invalid resource %r' % fn) -class TransientResource(object): - - """Raise ResourceDenied if an exception is raised while the context manager - is in effect that matches the specified exception and attributes.""" - - def __init__(self, exc, **kwargs): - self.exc = exc - self.attrs = kwargs - - def __enter__(self): - return self - - def __exit__(self, type_=None, value=None, traceback=None): - """If type_ is a subclass of self.exc and value has attributes matching - self.attrs, raise ResourceDenied. Otherwise let the exception - propagate (if any).""" - if type_ is not None and issubclass(self.exc, type_): - for attr, attr_value in self.attrs.items(): - if not hasattr(value, attr): - break - if getattr(value, attr) != attr_value: - break - else: - raise ResourceDenied("an optional resource is not available") - -# Context managers that raise ResourceDenied when various issues -# with the Internet connection manifest themselves as exceptions. 
-# XXX deprecate these and use transient_internet() instead -time_out = TransientResource(OSError, errno=errno.ETIMEDOUT) -socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) -ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) - - @contextlib.contextmanager def captured_output(stream_name): """Return a context manager used by captured_stdout/stdin/stderr diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index b5a16f9cb6027..a7d5b1bfe4eaf 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -660,7 +660,6 @@ def test_print_warning(self): # findfile # check_warnings # EnvironmentVarGuard - # TransientResource # transient_internet # run_with_locale # set_memlimit diff --git a/Lib/test/test_urllib2net.py b/Lib/test/test_urllib2net.py index ba4c500e8ec3e..cb74685715d35 100644 --- a/Lib/test/test_urllib2net.py +++ b/Lib/test/test_urllib2net.py @@ -1,3 +1,4 @@ +import errno import unittest from test import support from test.support import socket_helper @@ -39,6 +40,39 @@ def wrapped(*args, **kwargs): urllib.error.URLError) +class TransientResource(object): + + """Raise ResourceDenied if an exception is raised while the context manager + is in effect that matches the specified exception and attributes.""" + + def __init__(self, exc, **kwargs): + self.exc = exc + self.attrs = kwargs + + def __enter__(self): + return self + + def __exit__(self, type_=None, value=None, traceback=None): + """If type_ is a subclass of self.exc and value has attributes matching + self.attrs, raise ResourceDenied. Otherwise let the exception + propagate (if any).""" + if type_ is not None and issubclass(self.exc, type_): + for attr, attr_value in self.attrs.items(): + if not hasattr(value, attr): + break + if getattr(value, attr) != attr_value: + break + else: + raise ResourceDenied("an optional resource is not available") + +# Context managers that raise ResourceDenied when various issues +# with the Internet connection manifest themselves as exceptions. +# XXX deprecate these and use transient_internet() instead +time_out = TransientResource(OSError, errno=errno.ETIMEDOUT) +socket_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) +ioerror_peer_reset = TransientResource(OSError, errno=errno.ECONNRESET) + + class AuthTests(unittest.TestCase): """Tests urllib2 authentication features.""" @@ -237,9 +271,9 @@ def _test_urls(self, urls, handlers, retry=True): raise else: try: - with support.time_out, \ - support.socket_peer_reset, \ - support.ioerror_peer_reset: + with time_out, \ + socket_peer_reset, \ + ioerror_peer_reset: buf = f.read() debug("read %d bytes" % len(buf)) except socket.timeout: From webhook-mailer at python.org Thu Jun 11 12:30:58 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Thu, 11 Jun 2020 16:30:58 -0000 Subject: [Python-checkins] bpo-40939: Remove the old parser (GH-20768) Message-ID: https://github.com/python/cpython/commit/1ed83adb0e95305af858bd41af531e487f54fee7 commit: 1ed83adb0e95305af858bd41af531e487f54fee7 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-11T17:30:46+01:00 summary: bpo-40939: Remove the old parser (GH-20768) This commit removes the old parser, the deprecated parser module, the old parser compatibility flags and environment variables and all associated support code and documentation. 
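For third-party code that still imported the deprecated parser module, a minimal compatibility sketch (an illustration, not part of this commit): after this change the import simply fails, and the PEG parser is the only parser the interpreter provides.

    # Sketch only: the deprecated "parser" extension module is removed, so the
    # import below raises ModuleNotFoundError on interpreters with this commit.
    try:
        import parser  # removed together with the old LL(1) parser
    except ModuleNotFoundError:
        parser = None

    if parser is None:
        print("parser module unavailable; only the PEG parser remains")
    else:
        print("older interpreter: the deprecated parser module still exists")
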
files: A Misc/NEWS.d/next/Core and Builtins/2020-06-10-11-27-15.bpo-40939.DO-wAI.rst A Parser/peg_api.c A Parser/pegen.c A Parser/pegen.h A Parser/string_parser.c A Parser/string_parser.h D Lib/test/test_parser.py D Lib/test/test_peg_parser.py D Modules/_peg_parser.c D Modules/parsermodule.c D Parser/acceler.c D Parser/grammar1.c D Parser/listnode.c D Parser/parser.h D Parser/parsetok.c D Parser/pegen/parse.c D Parser/pegen/parse_string.c D Parser/pegen/parse_string.h D Parser/pegen/peg_api.c D Parser/pegen/pegen.c D Parser/pegen/pegen.h D Parser/pgen/__init__.py D Parser/pgen/__main__.py D Parser/pgen/automata.py D Parser/pgen/grammar.py D Parser/pgen/keywordgen.py D Parser/pgen/metaparser.py D Parser/pgen/pgen.py D Parser/pgen/token.py M Doc/c-api/init_config.rst M Doc/using/cmdline.rst M Include/cpython/initconfig.h M Lib/subprocess.py M Lib/test/support/__init__.py M Lib/test/test_embed.py M Lib/test/test_exceptions.py M Lib/test/test_flufl.py M Lib/test/test_fstring.py M Lib/test/test_grammar.py M Lib/test/test_string_literals.py M Lib/test/test_syntax.py M Lib/test/test_traceback.py M Makefile.pre.in M Modules/Setup M PC/config.c M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M PCbuild/regen.vcxproj M Parser/parser.c M Programs/_testembed.c M Python/ast.c M Python/compile.c M Python/initconfig.c M Python/pylifecycle.c M Python/pythonrun.c M Tools/peg_generator/pegen/build.py M configure.ac M setup.py diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index b7298ba825d3c..9b0728d962152 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -695,16 +695,6 @@ PyConfig :data:`sys._xoptions`. - .. c:member:: int _use_peg_parser - - Enable PEG parser? Default: 1. - - Set to 0 by :option:`-X oldparser <-X>` and :envvar:`PYTHONOLDPARSER`. - - See also :pep:`617`. - - .. deprecated-removed:: 3.9 3.10 - If ``parse_argv`` is non-zero, ``argv`` arguments are parsed the same way the regular Python parses command line arguments, and Python arguments are stripped from ``argv``: see :ref:`Command Line Arguments diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index f91ab020da5cf..7aacd8ffe822e 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -426,8 +426,6 @@ Miscellaneous options defines the following possible values: * ``-X faulthandler`` to enable :mod:`faulthandler`; - * ``-X oldparser``: enable the traditional LL(1) parser. See also - :envvar:`PYTHONOLDPARSER` and :pep:`617`. * ``-X showrefcount`` to output the total reference count and number of used memory blocks when the program finishes or after each statement in the interactive interpreter. This only works on debug builds. @@ -587,15 +585,6 @@ conflict. :option:`-d` multiple times. -.. envvar:: PYTHONOLDPARSER - - If this is set to a non-empty string, enable the traditional LL(1) parser. - - See also the :option:`-X` ``oldparser`` option and :pep:`617`. - - .. deprecated-removed:: 3.9 3.10 - - .. envvar:: PYTHONINSPECT If this is set to a non-empty string it is equivalent to specifying the diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 57933211bb937..5b05eab63bb46 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -144,10 +144,6 @@ typedef struct { Set to 1 by -X faulthandler and PYTHONFAULTHANDLER. -1 means unset. */ int faulthandler; - /* Enable PEG parser? - 1 by default, set to 0 by -X oldparser and PYTHONOLDPARSER */ - int _use_peg_parser; - /* Enable tracemalloc? 
Set by -X tracemalloc=N and PYTHONTRACEMALLOC. -1 means unset */ int tracemalloc; diff --git a/Lib/subprocess.py b/Lib/subprocess.py index 13600c28cf711..86fdf27f9b03b 100644 --- a/Lib/subprocess.py +++ b/Lib/subprocess.py @@ -326,7 +326,7 @@ def _args_from_interpreter_flags(): if dev_mode: args.extend(('-X', 'dev')) for opt in ('faulthandler', 'tracemalloc', 'importtime', - 'showrefcount', 'utf8', 'oldparser'): + 'showrefcount', 'utf8'): if opt in xoptions: value = xoptions[opt] if value is True: diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index f078ad780a0d4..498da6415080f 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1958,13 +1958,3 @@ def wait_process(pid, *, exitcode, timeout=None): # sanity check: it should not fail in practice if pid2 != pid: raise AssertionError(f"pid {pid2} != pid {pid}") - - -def use_old_parser(): - import _testinternalcapi - config = _testinternalcapi.get_configs() - return (config['config']['_use_peg_parser'] == 0) - - -def skip_if_new_parser(msg): - return unittest.skipIf(not use_old_parser(), msg) diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index b7b70589da52b..fe47289777a42 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -347,7 +347,6 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'isolated': 0, 'use_environment': 1, 'dev_mode': 0, - '_use_peg_parser': 1, 'install_signal_handlers': 1, 'use_hash_seed': 0, @@ -733,7 +732,6 @@ def test_init_from_config(self): 'import_time': 1, 'show_ref_count': 1, 'malloc_stats': 1, - '_use_peg_parser': 0, 'stdio_encoding': 'iso8859-1', 'stdio_errors': 'replace', @@ -807,7 +805,6 @@ def test_init_compat_env(self): 'warnoptions': ['EnvVar'], 'platlibdir': 'env_platlibdir', 'module_search_paths': self.IGNORE_CONFIG, - '_use_peg_parser': 0, } self.check_all_configs("test_init_compat_env", config, preconfig, api=API_COMPAT) @@ -837,7 +834,6 @@ def test_init_python_env(self): 'warnoptions': ['EnvVar'], 'platlibdir': 'env_platlibdir', 'module_search_paths': self.IGNORE_CONFIG, - '_use_peg_parser': 0, } self.check_all_configs("test_init_python_env", config, preconfig, api=API_PYTHON) diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index 196f31e76ccbe..feae31b142bf3 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -251,9 +251,9 @@ def baz(): check('def f():\n x, y: int', 2, 3) check('[*x for x in xs]', 1, 2) check('foo(x for x in range(10), 100)', 1, 5) - check('(yield i) = 2', 1, 1 if support.use_old_parser() else 2) - check('def f(*):\n pass', 1, 7 if support.use_old_parser() else 8) - check('for 1 in []: pass', 1, 5 if support.use_old_parser() else 7) + check('(yield i) = 2', 1, 2) + check('def f(*):\n pass', 1, 8) + check('for 1 in []: pass', 1, 7) @cpython_only def testSettingException(self): diff --git a/Lib/test/test_flufl.py b/Lib/test/test_flufl.py index 22285859a92bb..0ff54aa227e37 100644 --- a/Lib/test/test_flufl.py +++ b/Lib/test/test_flufl.py @@ -20,7 +20,7 @@ def test_barry_as_bdfl(self): self.assertTrue(cm.exception.lineno, 2) # The old parser reports the end of the token and the new # parser reports the start of the token - self.assertEqual(cm.exception.offset, 4 if support.use_old_parser() else 3) + self.assertEqual(cm.exception.offset, 3) def test_guido_as_bdfl(self): code = '2 {0} 3' @@ -33,7 +33,7 @@ def test_guido_as_bdfl(self): self.assertEqual(cm.exception.lineno, 1) # The old parser reports the end of the token and the new # parser reports the 
start of the token - self.assertEqual(cm.exception.offset, 4 if support.use_old_parser() else 3) + self.assertEqual(cm.exception.offset, 3) if __name__ == '__main__': diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 9048e89689df2..9eb7ebe10559a 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -12,7 +12,7 @@ import types import decimal import unittest -from test.support import temp_cwd, use_old_parser +from test.support import temp_cwd from test.support.script_helper import assert_python_failure a_global = 'global variable' @@ -1049,7 +1049,6 @@ def test_errors(self): r"f'{1000:j}'", ]) - @unittest.skipIf(use_old_parser(), "The old parser only supports as the filename") def test_filename_in_syntaxerror(self): # see issue 38964 with temp_cwd() as cwd: diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index e1a402e2b463b..ef7d1a15c7570 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1,7 +1,7 @@ # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. -from test.support import check_syntax_error, check_syntax_warning, use_old_parser +from test.support import check_syntax_error, check_syntax_warning import inspect import unittest import sys @@ -1714,69 +1714,53 @@ def __exit__(self, *args): with manager() as x, manager(): pass - if not use_old_parser(): - test_cases = [ - """if 1: - with ( - manager() - ): - pass - """, - """if 1: - with ( - manager() as x - ): - pass - """, - """if 1: - with ( - manager() as (x, y), - manager() as z, - ): - pass - """, - """if 1: - with ( - manager(), - manager() - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y, - manager() as z, - ): - pass - """, - """if 1: - with ( - manager() as x, - manager() as y, - manager(), - ): - pass - """, - ] - for case in test_cases: - with self.subTest(case=case): - compile(case, "", "exec") + with ( + manager() + ): + pass + + with ( + manager() as x + ): + pass + + with ( + manager() as (x, y), + manager() as z, + ): + pass + + with ( + manager(), + manager() + ): + pass + with ( + manager() as x, + manager() as y + ): + pass + + with ( + manager() as x, + manager() + ): + pass + + with ( + manager() as x, + manager() as y, + manager() as z, + ): + pass + + with ( + manager() as x, + manager() as y, + manager(), + ): + pass def test_if_else_expr(self): # Test ifelse expressions in various cases diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py deleted file mode 100644 index a4d2cdc090aa2..0000000000000 --- a/Lib/test/test_parser.py +++ /dev/null @@ -1,1046 +0,0 @@ -import copy -import warnings -with warnings.catch_warnings(): - warnings.filterwarnings('ignore', 'The parser module is deprecated', - DeprecationWarning) - import parser -import pickle -import unittest -import operator -import struct -from test import support -from test.support.script_helper import assert_python_failure -from test.support.script_helper import assert_python_ok - -# -# First, we test that we can generate trees from valid source fragments, -# and that these valid trees are indeed allowed by the tree-loading side -# of the parser module. 
-# - -class RoundtripLegalSyntaxTestCase(unittest.TestCase): - - def roundtrip(self, f, s): - st1 = f(s) - t = st1.totuple() - try: - st2 = parser.sequence2st(t) - except parser.ParserError as why: - self.fail("could not roundtrip %r: %s" % (s, why)) - - self.assertEqual(t, st2.totuple(), - "could not re-generate syntax tree") - - def check_expr(self, s): - self.roundtrip(parser.expr, s) - - def test_flags_passed(self): - # The unicode literals flags has to be passed from the parser to AST - # generation. - suite = parser.suite("from __future__ import unicode_literals; x = ''") - code = suite.compile() - scope = {} - exec(code, {}, scope) - self.assertIsInstance(scope["x"], str) - - def check_suite(self, s): - self.roundtrip(parser.suite, s) - - def test_yield_statement(self): - self.check_suite("def f(): yield 1") - self.check_suite("def f(): yield") - self.check_suite("def f(): x += yield") - self.check_suite("def f(): x = yield 1") - self.check_suite("def f(): x = y = yield 1") - self.check_suite("def f(): x = yield") - self.check_suite("def f(): x = y = yield") - self.check_suite("def f(): 1 + (yield)*2") - self.check_suite("def f(): (yield 1)*2") - self.check_suite("def f(): return; yield 1") - self.check_suite("def f(): yield 1; return") - self.check_suite("def f(): yield from 1") - self.check_suite("def f(): x = yield from 1") - self.check_suite("def f(): f((yield from 1))") - self.check_suite("def f(): yield 1; return 1") - self.check_suite("def f():\n" - " for x in range(30):\n" - " yield x\n") - self.check_suite("def f():\n" - " if (yield):\n" - " yield x\n") - - def test_await_statement(self): - self.check_suite("async def f():\n await smth()") - self.check_suite("async def f():\n foo = await smth()") - self.check_suite("async def f():\n foo, bar = await smth()") - self.check_suite("async def f():\n (await smth())") - self.check_suite("async def f():\n foo((await smth()))") - self.check_suite("async def f():\n await foo(); return 42") - - def test_async_with_statement(self): - self.check_suite("async def f():\n async with 1: pass") - self.check_suite("async def f():\n async with a as b, c as d: pass") - - def test_async_for_statement(self): - self.check_suite("async def f():\n async for i in (): pass") - self.check_suite("async def f():\n async for i, b in (): pass") - - def test_nonlocal_statement(self): - self.check_suite("def f():\n" - " x = 0\n" - " def g():\n" - " nonlocal x\n") - self.check_suite("def f():\n" - " x = y = 0\n" - " def g():\n" - " nonlocal x, y\n") - - def test_expressions(self): - self.check_expr("foo(1)") - self.check_expr("[1, 2, 3]") - self.check_expr("[x**3 for x in range(20)]") - self.check_expr("[x**3 for x in range(20) if x % 3]") - self.check_expr("[x**3 for x in range(20) if x % 2 if x % 3]") - self.check_expr("list(x**3 for x in range(20))") - self.check_expr("list(x**3 for x in range(20) if x % 3)") - self.check_expr("list(x**3 for x in range(20) if x % 2 if x % 3)") - self.check_expr("foo(*args)") - self.check_expr("foo(*args, **kw)") - self.check_expr("foo(**kw)") - self.check_expr("foo(key=value)") - self.check_expr("foo(key=value, *args)") - self.check_expr("foo(key=value, *args, **kw)") - self.check_expr("foo(key=value, **kw)") - self.check_expr("foo(a, b, c, *args)") - self.check_expr("foo(a, b, c, *args, **kw)") - self.check_expr("foo(a, b, c, **kw)") - self.check_expr("foo(a, *args, keyword=23)") - self.check_expr("foo + bar") - self.check_expr("foo - bar") - self.check_expr("foo * bar") - self.check_expr("foo / bar") - 
self.check_expr("foo // bar") - self.check_expr("(foo := 1)") - self.check_expr("lambda: 0") - self.check_expr("lambda x: 0") - self.check_expr("lambda *y: 0") - self.check_expr("lambda *y, **z: 0") - self.check_expr("lambda **z: 0") - self.check_expr("lambda x, y: 0") - self.check_expr("lambda foo=bar: 0") - self.check_expr("lambda foo=bar, spaz=nifty+spit: 0") - self.check_expr("lambda foo=bar, **z: 0") - self.check_expr("lambda foo=bar, blaz=blat+2, **z: 0") - self.check_expr("lambda foo=bar, blaz=blat+2, *y, **z: 0") - self.check_expr("lambda x, *y, **z: 0") - self.check_expr("(x for x in range(10))") - self.check_expr("foo(x for x in range(10))") - self.check_expr("...") - self.check_expr("a[...]") - - def test_simple_expression(self): - # expr_stmt - self.check_suite("a") - - def test_simple_assignments(self): - self.check_suite("a = b") - self.check_suite("a = b = c = d = e") - - def test_var_annot(self): - self.check_suite("x: int = 5") - self.check_suite("y: List[T] = []; z: [list] = fun()") - self.check_suite("x: tuple = (1, 2)") - self.check_suite("d[f()]: int = 42") - self.check_suite("f(d[x]): str = 'abc'") - self.check_suite("x.y.z.w: complex = 42j") - self.check_suite("x: int") - self.check_suite("def f():\n" - " x: str\n" - " y: int = 5\n") - self.check_suite("class C:\n" - " x: str\n" - " y: int = 5\n") - self.check_suite("class C:\n" - " def __init__(self, x: int) -> None:\n" - " self.x: int = x\n") - # double check for nonsense - with self.assertRaises(SyntaxError): - exec("2+2: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("[]: int = 5", {}, {}) - with self.assertRaises(SyntaxError): - exec("x, *y, z: int = range(5)", {}, {}) - with self.assertRaises(SyntaxError): - exec("x: int = 1, y = 2", {}, {}) - with self.assertRaises(SyntaxError): - exec("u = v: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("False: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("x.False: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("x.y,: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("[0]: int", {}, {}) - with self.assertRaises(SyntaxError): - exec("f(): int", {}, {}) - - def test_simple_augmented_assignments(self): - self.check_suite("a += b") - self.check_suite("a -= b") - self.check_suite("a *= b") - self.check_suite("a /= b") - self.check_suite("a //= b") - self.check_suite("a %= b") - self.check_suite("a &= b") - self.check_suite("a |= b") - self.check_suite("a ^= b") - self.check_suite("a <<= b") - self.check_suite("a >>= b") - self.check_suite("a **= b") - - def test_function_defs(self): - self.check_suite("def f(): pass") - self.check_suite("def f(*args): pass") - self.check_suite("def f(*args, **kw): pass") - self.check_suite("def f(**kw): pass") - self.check_suite("def f(foo=bar): pass") - self.check_suite("def f(foo=bar, *args): pass") - self.check_suite("def f(foo=bar, *args, **kw): pass") - self.check_suite("def f(foo=bar, **kw): pass") - - self.check_suite("def f(a, b): pass") - self.check_suite("def f(a, b, *args): pass") - self.check_suite("def f(a, b, *args, **kw): pass") - self.check_suite("def f(a, b, **kw): pass") - self.check_suite("def f(a, b, foo=bar): pass") - self.check_suite("def f(a, b, foo=bar, *args): pass") - self.check_suite("def f(a, b, foo=bar, *args, **kw): pass") - self.check_suite("def f(a, b, foo=bar, **kw): pass") - - self.check_suite("@staticmethod\n" - "def f(): pass") - self.check_suite("@staticmethod\n" - "@funcattrs(x, y)\n" - "def f(): pass") - self.check_suite("@funcattrs()\n" - "def 
f(): pass") - - self.check_suite("@False or x\n" - "def f(): pass") - self.check_suite("@d := x\n" - "def f(): pass") - self.check_suite("@lambda f: x(f)\n" - "def f(): pass") - self.check_suite("@[..., x, ...][1]\n" - "def f(): pass") - self.check_suite("@x(x)(x)\n" - "def f(): pass") - self.check_suite("@(x, x)\n" - "def f(): pass") - self.check_suite("@...\n" - "def f(): pass") - self.check_suite("@None\n" - "def f(): pass") - self.check_suite("@w @(x @y) @(z)\n" - "def f(): pass") - self.check_suite("@w[x].y.z\n" - "def f(): pass") - - # keyword-only arguments - self.check_suite("def f(*, a): pass") - self.check_suite("def f(*, a = 5): pass") - self.check_suite("def f(*, a = 5, b): pass") - self.check_suite("def f(*, a, b = 5): pass") - self.check_suite("def f(*, a, b = 5, **kwds): pass") - self.check_suite("def f(*args, a): pass") - self.check_suite("def f(*args, a = 5): pass") - self.check_suite("def f(*args, a = 5, b): pass") - self.check_suite("def f(*args, a, b = 5): pass") - self.check_suite("def f(*args, a, b = 5, **kwds): pass") - - # positional-only arguments - self.check_suite("def f(a, /): pass") - self.check_suite("def f(a, /,): pass") - self.check_suite("def f(a, b, /): pass") - self.check_suite("def f(a, b, /, c): pass") - self.check_suite("def f(a, b, /, c = 6): pass") - self.check_suite("def f(a, b, /, c, *, d): pass") - self.check_suite("def f(a, b, /, c = 1, *, d): pass") - self.check_suite("def f(a, b, /, c, *, d = 1): pass") - self.check_suite("def f(a, b=1, /, c=2, *, d = 3): pass") - self.check_suite("def f(a=0, b=1, /, c=2, *, d = 3): pass") - - # function annotations - self.check_suite("def f(a: int): pass") - self.check_suite("def f(a: int = 5): pass") - self.check_suite("def f(*args: list): pass") - self.check_suite("def f(**kwds: dict): pass") - self.check_suite("def f(*, a: int): pass") - self.check_suite("def f(*, a: int = 5): pass") - self.check_suite("def f() -> int: pass") - - def test_class_defs(self): - self.check_suite("class foo():pass") - self.check_suite("class foo(object):pass") - self.check_suite("@class_decorator\n" - "class foo():pass") - self.check_suite("@class_decorator(arg)\n" - "class foo():pass") - self.check_suite("@decorator1\n" - "@decorator2\n" - "class foo():pass") - - self.check_suite("@False or x\n" - "class C: pass") - self.check_suite("@d := x\n" - "class C: pass") - self.check_suite("@lambda f: x(f)\n" - "class C: pass") - self.check_suite("@[..., x, ...][1]\n" - "class C: pass") - self.check_suite("@x(x)(x)\n" - "class C: pass") - self.check_suite("@(x, x)\n" - "class C: pass") - self.check_suite("@...\n" - "class C: pass") - self.check_suite("@None\n" - "class C: pass") - self.check_suite("@w @(x @y) @(z)\n" - "class C: pass") - self.check_suite("@w[x].y.z\n" - "class C: pass") - - def test_import_from_statement(self): - self.check_suite("from sys.path import *") - self.check_suite("from sys.path import dirname") - self.check_suite("from sys.path import (dirname)") - self.check_suite("from sys.path import (dirname,)") - self.check_suite("from sys.path import dirname as my_dirname") - self.check_suite("from sys.path import (dirname as my_dirname)") - self.check_suite("from sys.path import (dirname as my_dirname,)") - self.check_suite("from sys.path import dirname, basename") - self.check_suite("from sys.path import (dirname, basename)") - self.check_suite("from sys.path import (dirname, basename,)") - self.check_suite( - "from sys.path import dirname as my_dirname, basename") - self.check_suite( - "from sys.path import (dirname 
as my_dirname, basename)") - self.check_suite( - "from sys.path import (dirname as my_dirname, basename,)") - self.check_suite( - "from sys.path import dirname, basename as my_basename") - self.check_suite( - "from sys.path import (dirname, basename as my_basename)") - self.check_suite( - "from sys.path import (dirname, basename as my_basename,)") - self.check_suite("from .bogus import x") - - def test_basic_import_statement(self): - self.check_suite("import sys") - self.check_suite("import sys as system") - self.check_suite("import sys, math") - self.check_suite("import sys as system, math") - self.check_suite("import sys, math as my_math") - - def test_relative_imports(self): - self.check_suite("from . import name") - self.check_suite("from .. import name") - # check all the way up to '....', since '...' is tokenized - # differently from '.' (it's an ellipsis token). - self.check_suite("from ... import name") - self.check_suite("from .... import name") - self.check_suite("from .pkg import name") - self.check_suite("from ..pkg import name") - self.check_suite("from ...pkg import name") - self.check_suite("from ....pkg import name") - - def test_pep263(self): - self.check_suite("# -*- coding: iso-8859-1 -*-\n" - "pass\n") - - def test_assert(self): - self.check_suite("assert alo < ahi and blo < bhi\n") - - def test_with(self): - self.check_suite("with open('x'): pass\n") - self.check_suite("with open('x') as f: pass\n") - self.check_suite("with open('x') as f, open('y') as g: pass\n") - - def test_try_stmt(self): - self.check_suite("try: pass\nexcept: pass\n") - self.check_suite("try: pass\nfinally: pass\n") - self.check_suite("try: pass\nexcept A: pass\nfinally: pass\n") - self.check_suite("try: pass\nexcept A: pass\nexcept: pass\n" - "finally: pass\n") - self.check_suite("try: pass\nexcept: pass\nelse: pass\n") - self.check_suite("try: pass\nexcept: pass\nelse: pass\n" - "finally: pass\n") - - def test_if_stmt(self): - self.check_suite("if True:\n pass\nelse:\n pass\n") - self.check_suite("if True:\n pass\nelif True:\n pass\nelse:\n pass\n") - - def test_position(self): - # An absolutely minimal test of position information. Better - # tests would be a big project. 
- code = "def f(x):\n return x + 1" - st = parser.suite(code) - - def walk(tree): - node_type = tree[0] - next = tree[1] - if isinstance(next, (tuple, list)): - for elt in tree[1:]: - for x in walk(elt): - yield x - else: - yield tree - - expected = [ - (1, 'def', 1, 0), - (1, 'f', 1, 4), - (7, '(', 1, 5), - (1, 'x', 1, 6), - (8, ')', 1, 7), - (11, ':', 1, 8), - (4, '', 1, 9), - (5, '', 2, -1), - (1, 'return', 2, 4), - (1, 'x', 2, 11), - (14, '+', 2, 13), - (2, '1', 2, 15), - (4, '', 2, 16), - (6, '', 2, -1), - (4, '', 2, -1), - (0, '', 2, -1), - ] - - self.assertEqual(list(walk(st.totuple(line_info=True, col_info=True))), - expected) - self.assertEqual(list(walk(st.totuple())), - [(t, n) for t, n, l, c in expected]) - self.assertEqual(list(walk(st.totuple(line_info=True))), - [(t, n, l) for t, n, l, c in expected]) - self.assertEqual(list(walk(st.totuple(col_info=True))), - [(t, n, c) for t, n, l, c in expected]) - self.assertEqual(list(walk(st.tolist(line_info=True, col_info=True))), - [list(x) for x in expected]) - self.assertEqual(list(walk(parser.st2tuple(st, line_info=True, - col_info=True))), - expected) - self.assertEqual(list(walk(parser.st2list(st, line_info=True, - col_info=True))), - [list(x) for x in expected]) - - def test_extended_unpacking(self): - self.check_suite("*a = y") - self.check_suite("x, *b, = m") - self.check_suite("[*a, *b] = y") - self.check_suite("for [*x, b] in x: pass") - - def test_raise_statement(self): - self.check_suite("raise\n") - self.check_suite("raise e\n") - self.check_suite("try:\n" - " suite\n" - "except Exception as e:\n" - " raise ValueError from e\n") - - def test_list_displays(self): - self.check_expr('[]') - self.check_expr('[*{2}, 3, *[4]]') - - def test_set_displays(self): - self.check_expr('{*{2}, 3, *[4]}') - self.check_expr('{2}') - self.check_expr('{2,}') - self.check_expr('{2, 3}') - self.check_expr('{2, 3,}') - - def test_dict_displays(self): - self.check_expr('{}') - self.check_expr('{a:b}') - self.check_expr('{a:b,}') - self.check_expr('{a:b, c:d}') - self.check_expr('{a:b, c:d,}') - self.check_expr('{**{}}') - self.check_expr('{**{}, 3:4, **{5:6, 7:8}}') - - def test_argument_unpacking(self): - self.check_expr("f(*a, **b)") - self.check_expr('f(a, *b, *c, *d)') - self.check_expr('f(**a, **b)') - self.check_expr('f(2, *a, *b, **b, **c, **d)') - self.check_expr("f(*b, *() or () and (), **{} and {}, **() or {})") - - def test_set_comprehensions(self): - self.check_expr('{x for x in seq}') - self.check_expr('{f(x) for x in seq}') - self.check_expr('{f(x) for x in seq if condition(x)}') - - def test_dict_comprehensions(self): - self.check_expr('{x:x for x in seq}') - self.check_expr('{x**2:x[3] for x in seq if condition(x)}') - self.check_expr('{x:x for x in seq1 for y in seq2 if condition(x, y)}') - - def test_named_expressions(self): - self.check_suite("(a := 1)") - self.check_suite("(a := a)") - self.check_suite("if (match := pattern.search(data)) is None: pass") - self.check_suite("while match := pattern.search(f.read()): pass") - self.check_suite("[y := f(x), y**2, y**3]") - self.check_suite("filtered_data = [y for x in data if (y := f(x)) is None]") - self.check_suite("(y := f(x))") - self.check_suite("y0 = (y1 := f(x))") - self.check_suite("foo(x=(y := f(x)))") - self.check_suite("def foo(answer=(p := 42)): pass") - self.check_suite("def foo(answer: (p := 42) = 5): pass") - self.check_suite("lambda: (x := 1)") - self.check_suite("(x := lambda: 1)") - self.check_suite("(x := lambda: (y := 1))") # not in PEP - 
self.check_suite("lambda line: (m := re.match(pattern, line)) and m.group(1)") - self.check_suite("x = (y := 0)") - self.check_suite("(z:=(y:=(x:=0)))") - self.check_suite("(info := (name, phone, *rest))") - self.check_suite("(x:=1,2)") - self.check_suite("(total := total + tax)") - self.check_suite("len(lines := f.readlines())") - self.check_suite("foo(x := 3, cat='vector')") - self.check_suite("foo(cat=(category := 'vector'))") - self.check_suite("if any(len(longline := l) >= 100 for l in lines): print(longline)") - self.check_suite( - "if env_base := os.environ.get('PYTHONUSERBASE', None): return env_base" - ) - self.check_suite( - "if self._is_special and (ans := self._check_nans(context=context)): return ans" - ) - self.check_suite("foo(b := 2, a=1)") - self.check_suite("foo(b := 2, a=1)") - self.check_suite("foo((b := 2), a=1)") - self.check_suite("foo(c=(b := 2), a=1)") - self.check_suite("{(x := C(i)).q: x for i in y}") - - -# -# Second, we take *invalid* trees and make sure we get ParserError -# rejections for them. -# - -class IllegalSyntaxTestCase(unittest.TestCase): - - def check_bad_tree(self, tree, label): - try: - parser.sequence2st(tree) - except parser.ParserError: - pass - else: - self.fail("did not detect invalid tree for %r" % label) - - def test_junk(self): - # not even remotely valid: - self.check_bad_tree((1, 2, 3), "") - - def test_illegal_terminal(self): - tree = \ - (257, - (269, - (270, - (271, - (277, - (1,))), - (4, ''))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "too small items in terminal node") - tree = \ - (257, - (269, - (270, - (271, - (277, - (1, b'pass'))), - (4, ''))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "non-string second item in terminal node") - tree = \ - (257, - (269, - (270, - (271, - (277, - (1, 'pass', '0', 0))), - (4, ''))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "non-integer third item in terminal node") - tree = \ - (257, - (269, - (270, - (271, - (277, - (1, 'pass', 0, 0))), - (4, ''))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "too many items in terminal node") - - def test_illegal_yield_1(self): - # Illegal yield statement: def f(): return 1; yield 1 - tree = \ - (257, - (264, - (285, - (259, - (1, 'def'), - (1, 'f'), - (260, (7, '('), (8, ')')), - (11, ':'), - (291, - (4, ''), - (5, ''), - (264, - (265, - (266, - (272, - (275, - (1, 'return'), - (313, - (292, - (293, - (294, - (295, - (297, - (298, - (299, - (300, - (301, - (302, (303, (304, (305, (2, '1')))))))))))))))))), - (264, - (265, - (266, - (272, - (276, - (1, 'yield'), - (313, - (292, - (293, - (294, - (295, - (297, - (298, - (299, - (300, - (301, - (302, - (303, (304, (305, (2, '1')))))))))))))))))), - (4, ''))), - (6, ''))))), - (4, ''), - (0, '')))) - self.check_bad_tree(tree, "def f():\n return 1\n yield 1") - - def test_illegal_yield_2(self): - # Illegal return in generator: def f(): return 1; yield 1 - tree = \ - (257, - (264, - (265, - (266, - (278, - (1, 'from'), - (281, (1, '__future__')), - (1, 'import'), - (279, (1, 'generators')))), - (4, ''))), - (264, - (285, - (259, - (1, 'def'), - (1, 'f'), - (260, (7, '('), (8, ')')), - (11, ':'), - (291, - (4, ''), - (5, ''), - (264, - (265, - (266, - (272, - (275, - (1, 'return'), - (313, - (292, - (293, - (294, - (295, - (297, - (298, - (299, - (300, - (301, - (302, (303, (304, (305, (2, '1')))))))))))))))))), - (264, - (265, - (266, - (272, - (276, - (1, 'yield'), - (313, - (292, - (293, - (294, - (295, - (297, - (298, - (299, - (300, - (301, - (302, - (303, (304, (305, (2, 
'1')))))))))))))))))), - (4, ''))), - (6, ''))))), - (4, ''), - (0, '')))) - self.check_bad_tree(tree, "def f():\n return 1\n yield 1") - - def test_a_comma_comma_c(self): - # Illegal input: a,,c - tree = \ - (258, - (311, - (290, - (291, - (292, - (293, - (295, - (296, - (297, - (298, (299, (300, (301, (302, (303, (1, 'a')))))))))))))), - (12, ','), - (12, ','), - (290, - (291, - (292, - (293, - (295, - (296, - (297, - (298, (299, (300, (301, (302, (303, (1, 'c'))))))))))))))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "a,,c") - - def test_illegal_operator(self): - # Illegal input: a $= b - tree = \ - (257, - (264, - (265, - (266, - (267, - (312, - (291, - (292, - (293, - (294, - (296, - (297, - (298, - (299, - (300, (301, (302, (303, (304, (1, 'a'))))))))))))))), - (268, (37, '$=')), - (312, - (291, - (292, - (293, - (294, - (296, - (297, - (298, - (299, - (300, (301, (302, (303, (304, (1, 'b'))))))))))))))))), - (4, ''))), - (0, '')) - self.check_bad_tree(tree, "a $= b") - - def test_malformed_global(self): - #doesn't have global keyword in ast - tree = (257, - (264, - (265, - (266, - (282, (1, 'foo'))), (4, ''))), - (4, ''), - (0, '')) - self.check_bad_tree(tree, "malformed global ast") - - def test_missing_import_source(self): - # from import fred - tree = \ - (257, - (268, - (269, - (270, - (282, - (284, (1, 'from'), (1, 'import'), - (287, (285, (1, 'fred')))))), - (4, ''))), - (4, ''), (0, '')) - self.check_bad_tree(tree, "from import fred") - - def test_illegal_encoding(self): - # Illegal encoding declaration - tree = \ - (341, - (257, (0, ''))) - self.check_bad_tree(tree, "missed encoding") - tree = \ - (341, - (257, (0, '')), - b'iso-8859-1') - self.check_bad_tree(tree, "non-string encoding") - tree = \ - (341, - (257, (0, '')), - '\udcff') - with self.assertRaises(UnicodeEncodeError): - parser.sequence2st(tree) - - def test_invalid_node_id(self): - tree = (257, (269, (-7, ''))) - self.check_bad_tree(tree, "negative node id") - tree = (257, (269, (99, ''))) - self.check_bad_tree(tree, "invalid token id") - tree = (257, (269, (9999, (0, '')))) - self.check_bad_tree(tree, "invalid symbol id") - - def test_ParserError_message(self): - try: - parser.sequence2st((257,(269,(257,(0,''))))) - except parser.ParserError as why: - self.assertIn("compound_stmt", str(why)) # Expected - self.assertIn("file_input", str(why)) # Got - - - -class CompileTestCase(unittest.TestCase): - - # These tests are very minimal. :-( - - def test_compile_expr(self): - st = parser.expr('2 + 3') - code = parser.compilest(st) - self.assertEqual(eval(code), 5) - - def test_compile_suite(self): - st = parser.suite('x = 2; y = x + 3') - code = parser.compilest(st) - globs = {} - exec(code, globs) - self.assertEqual(globs['y'], 5) - - def test_compile_error(self): - st = parser.suite('1 = 3 + 4') - self.assertRaises(SyntaxError, parser.compilest, st) - - def test_compile_badunicode(self): - st = parser.suite('a = "\\U12345678"') - self.assertRaises(SyntaxError, parser.compilest, st) - st = parser.suite('a = "\\u1"') - self.assertRaises(SyntaxError, parser.compilest, st) - - def test_issue_9011(self): - # Issue 9011: compilation of an unary minus expression changed - # the meaning of the ST, so that a second compilation produced - # incorrect results. 
- st = parser.expr('-3') - code1 = parser.compilest(st) - self.assertEqual(eval(code1), -3) - code2 = parser.compilest(st) - self.assertEqual(eval(code2), -3) - - def test_compile_filename(self): - st = parser.expr('a + 5') - code = parser.compilest(st) - self.assertEqual(code.co_filename, '') - code = st.compile() - self.assertEqual(code.co_filename, '') - for filename in 'file.py', b'file.py': - code = parser.compilest(st, filename) - self.assertEqual(code.co_filename, 'file.py') - code = st.compile(filename) - self.assertEqual(code.co_filename, 'file.py') - for filename in bytearray(b'file.py'), memoryview(b'file.py'): - with self.assertWarns(DeprecationWarning): - code = parser.compilest(st, filename) - self.assertEqual(code.co_filename, 'file.py') - with self.assertWarns(DeprecationWarning): - code = st.compile(filename) - self.assertEqual(code.co_filename, 'file.py') - self.assertRaises(TypeError, parser.compilest, st, list(b'file.py')) - self.assertRaises(TypeError, st.compile, list(b'file.py')) - - -class ParserStackLimitTestCase(unittest.TestCase): - """try to push the parser to/over its limits. - see http://bugs.python.org/issue1881 for a discussion - """ - def _nested_expression(self, level): - return "["*level+"]"*level - - def test_deeply_nested_list(self): - # This has fluctuated between 99 levels in 2.x, down to 93 levels in - # 3.7.X and back up to 99 in 3.8.X. Related to MAXSTACK size in Parser.h - e = self._nested_expression(99) - st = parser.expr(e) - st.compile() - - def test_trigger_memory_error(self): - e = self._nested_expression(100) - rc, out, err = assert_python_failure('-Xoldparser', '-c', e) - # parsing the expression will result in an error message - # followed by a MemoryError (see #11963) - self.assertIn(b's_push: parser stack overflow', err) - self.assertIn(b'MemoryError', err) - -class STObjectTestCase(unittest.TestCase): - """Test operations on ST objects themselves""" - - def test_comparisons(self): - # ST objects should support order and equality comparisons - st1 = parser.expr('2 + 3') - st2 = parser.suite('x = 2; y = x + 3') - st3 = parser.expr('list(x**3 for x in range(20))') - st1_copy = parser.expr('2 + 3') - st2_copy = parser.suite('x = 2; y = x + 3') - st3_copy = parser.expr('list(x**3 for x in range(20))') - - # exercise fast path for object identity - self.assertEqual(st1 == st1, True) - self.assertEqual(st2 == st2, True) - self.assertEqual(st3 == st3, True) - # slow path equality - self.assertEqual(st1, st1_copy) - self.assertEqual(st2, st2_copy) - self.assertEqual(st3, st3_copy) - self.assertEqual(st1 == st2, False) - self.assertEqual(st1 == st3, False) - self.assertEqual(st2 == st3, False) - self.assertEqual(st1 != st1, False) - self.assertEqual(st2 != st2, False) - self.assertEqual(st3 != st3, False) - self.assertEqual(st1 != st1_copy, False) - self.assertEqual(st2 != st2_copy, False) - self.assertEqual(st3 != st3_copy, False) - self.assertEqual(st2 != st1, True) - self.assertEqual(st1 != st3, True) - self.assertEqual(st3 != st2, True) - # we don't particularly care what the ordering is; just that - # it's usable and self-consistent - self.assertEqual(st1 < st2, not (st2 <= st1)) - self.assertEqual(st1 < st3, not (st3 <= st1)) - self.assertEqual(st2 < st3, not (st3 <= st2)) - self.assertEqual(st1 < st2, st2 > st1) - self.assertEqual(st1 < st3, st3 > st1) - self.assertEqual(st2 < st3, st3 > st2) - self.assertEqual(st1 <= st2, st2 >= st1) - self.assertEqual(st3 <= st1, st1 >= st3) - self.assertEqual(st2 <= st3, st3 >= st2) - # transitivity - 
bottom = min(st1, st2, st3) - top = max(st1, st2, st3) - mid = sorted([st1, st2, st3])[1] - self.assertTrue(bottom < mid) - self.assertTrue(bottom < top) - self.assertTrue(mid < top) - self.assertTrue(bottom <= mid) - self.assertTrue(bottom <= top) - self.assertTrue(mid <= top) - self.assertTrue(bottom <= bottom) - self.assertTrue(mid <= mid) - self.assertTrue(top <= top) - # interaction with other types - self.assertEqual(st1 == 1588.602459, False) - self.assertEqual('spanish armada' != st2, True) - self.assertRaises(TypeError, operator.ge, st3, None) - self.assertRaises(TypeError, operator.le, False, st1) - self.assertRaises(TypeError, operator.lt, st1, 1815) - self.assertRaises(TypeError, operator.gt, b'waterloo', st2) - - def test_copy_pickle(self): - sts = [ - parser.expr('2 + 3'), - parser.suite('x = 2; y = x + 3'), - parser.expr('list(x**3 for x in range(20))') - ] - for st in sts: - st_copy = copy.copy(st) - self.assertEqual(st_copy.totuple(), st.totuple()) - st_copy = copy.deepcopy(st) - self.assertEqual(st_copy.totuple(), st.totuple()) - for proto in range(pickle.HIGHEST_PROTOCOL+1): - st_copy = pickle.loads(pickle.dumps(st, proto)) - self.assertEqual(st_copy.totuple(), st.totuple()) - - check_sizeof = support.check_sizeof - - @support.cpython_only - def test_sizeof(self): - def XXXROUNDUP(n): - if n <= 1: - return n - if n <= 128: - return (n + 3) & ~3 - return 1 << (n - 1).bit_length() - - basesize = support.calcobjsize('Piii') - nodesize = struct.calcsize('hP3iP0h2i') - def sizeofchildren(node): - if node is None: - return 0 - res = 0 - hasstr = len(node) > 1 and isinstance(node[-1], str) - if hasstr: - res += len(node[-1]) + 1 - children = node[1:-1] if hasstr else node[1:] - if children: - res += XXXROUNDUP(len(children)) * nodesize - for child in children: - res += sizeofchildren(child) - return res - - def check_st_sizeof(st): - self.check_sizeof(st, basesize + nodesize + - sizeofchildren(st.totuple())) - - check_st_sizeof(parser.expr('2 + 3')) - check_st_sizeof(parser.expr('2 + 3 + 4')) - check_st_sizeof(parser.suite('x = 2 + 3')) - check_st_sizeof(parser.suite('')) - check_st_sizeof(parser.suite('# -*- coding: utf-8 -*-')) - check_st_sizeof(parser.expr('[' + '2,' * 1000 + ']')) - - - # XXX tests for pickling and unpickling of ST objects should go here - -class OtherParserCase(unittest.TestCase): - - def test_two_args_to_expr(self): - # See bug #12264 - with self.assertRaises(TypeError): - parser.expr("a", "b") - - -class TestDeprecation(unittest.TestCase): - def test_deprecation_message(self): - code = "def f():\n import parser\n\nf()" - rc, out, err = assert_python_ok('-c', code) - self.assertIn(b':2: DeprecationWarning', err) - - -if __name__ == "__main__": - unittest.main() diff --git a/Lib/test/test_peg_parser.py b/Lib/test/test_peg_parser.py deleted file mode 100644 index fae85e323da04..0000000000000 --- a/Lib/test/test_peg_parser.py +++ /dev/null @@ -1,803 +0,0 @@ -import ast -import _peg_parser as peg_parser -import unittest -from typing import Any, Union, Iterable, Tuple -from textwrap import dedent -from test import support - - -TEST_CASES = [ - ('annotated_assignment', 'x: int = 42'), - ('annotated_assignment_with_tuple', 'x: tuple = 1, 2'), - ('annotated_assignment_with_parens', '(paren): int = 3+2'), - ('annotated_assignment_with_yield', 'x: int = yield 42'), - ('annotated_no_assignment', 'x: int'), - ('annotation_with_multiple_parens', '((parens)): int'), - ('annotation_with_parens', '(parens): int'), - ('annotated_assignment_with_attr', 'a.b: int'), - 
('annotated_assignment_with_subscript', 'a[b]: int'), - ('annotated_assignment_with_attr_and_parens', '(a.b): int'), - ('annotated_assignment_with_subscript_and_parens', '(a[b]): int'), - ('assert', 'assert a'), - ('assert_message', 'assert a, b'), - ('assignment_false', 'a = False'), - ('assignment_none', 'a = None'), - ('assignment_true', 'a = True'), - ('assignment_paren', '(a) = 42'), - ('assignment_paren_multiple', '(a, b) = (0, 1)'), - ('asyncfor', - ''' - async for i in a: - pass - '''), - ('attribute_call', 'a.b()'), - ('attribute_multiple_names', 'abcd.efg.hij'), - ('attribute_simple', 'a.b'), - ('attributes_subscript', 'a.b[0]'), - ('augmented_assignment', 'x += 42'), - ('augmented_assignment_attribute', 'a.b.c += 42'), - ('augmented_assignment_paren', '(x) += 42'), - ('augmented_assignment_paren_subscript', '(x[0]) -= 42'), - ('binop_add', '1 + 1'), - ('binop_add_multiple', '1 + 1 + 1 + 1'), - ('binop_all', '1 + 2 * 5 + 3 ** 2 - -3'), - ('binop_boolop_comp', '1 + 1 == 2 or 1 + 1 == 3 and not b'), - ('boolop_or', 'a or b'), - ('boolop_or_multiple', 'a or b or c'), - ('class_def_bases', - ''' - class C(A, B): - pass - '''), - ('class_def_decorators', - ''' - @a - class C: - pass - '''), - ('class_def_decorator_with_expression', - ''' - @lambda x: 42 - class C: - pass - '''), - ('class_def_decorator_with_expression_and_walrus', - ''' - @x:=lambda x: 42 - class C: - pass - '''), - - ('class_def_keywords', - ''' - class C(keyword=a+b, **c): - pass - '''), - ('class_def_mixed', - ''' - class C(A, B, keyword=0, **a): - pass - '''), - ('class_def_simple', - ''' - class C: - pass - '''), - ('class_def_starred_and_kwarg', - ''' - class C(A, B, *x, **y): - pass - '''), - ('class_def_starred_in_kwargs', - ''' - class C(A, x=2, *[B, C], y=3): - pass - '''), - ('call_attribute', 'f().b'), - ('call_genexp', 'f(i for i in a)'), - ('call_mixed_args', 'f(a, b, *c, **d)'), - ('call_mixed_args_named', 'f(a, b, *c, d=4, **v)'), - ('call_one_arg', 'f(a)'), - ('call_posarg_genexp', 'f(a, (i for i in a))'), - ('call_simple', 'f()'), - ('call_subscript', 'f()[0]'), - ('comp', 'a == b'), - ('comp_multiple', 'a == b == c'), - ('comp_paren_end', 'a == (b-1)'), - ('comp_paren_start', '(a-1) == b'), - ('decorator', - ''' - @a - def f(): - pass - '''), - ('decorator_async', - ''' - @a - async def d(): - pass - '''), - ('decorator_with_expression', - ''' - @lambda x: 42 - def f(): - pass - '''), - ('decorator_with_expression_and_walrus', - ''' - @x:=lambda x: 42 - def f(): - pass - '''), - ('del_attribute', 'del a.b'), - ('del_call_attribute', 'del a().c'), - ('del_call_genexp_attribute', 'del a(i for i in b).c'), - ('del_empty', 'del()'), - ('del_list', 'del a, [b, c]'), - ('del_mixed', 'del a[0].b().c'), - ('del_multiple', 'del a, b'), - ('del_multiple_calls_attribute', 'del a()().b'), - ('del_paren', 'del(a,b)'), - ('del_paren_single_target', 'del(a)'), - ('del_subscript_attribute', 'del a[0].b'), - ('del_tuple', 'del a, (b, c)'), - ('delete', 'del a'), - ('dict', - ''' - { - a: 1, - b: 2, - c: 3 - } - '''), - ('dict_comp', '{x:1 for x in a}'), - ('dict_comp_if', '{x:1+2 for x in a if b}'), - ('dict_empty', '{}'), - ('empty_line_after_linecont', - r''' - pass - \ - - pass - '''), - ('for', - ''' - for i in a: - pass - '''), - ('for_else', - ''' - for i in a: - pass - else: - pass - '''), - ('for_star_target_in_paren', 'for (a) in b: pass'), - ('for_star_targets_attribute', 'for a.b in c: pass'), - ('for_star_targets_call_attribute', 'for a().c in b: pass'), - ('for_star_targets_empty', 'for () in a: pass'), 
- ('for_star_targets_mixed', 'for a[0].b().c in d: pass'), - ('for_star_targets_mixed_starred', - ''' - for a, *b, (c, d) in e: - pass - '''), - ('for_star_targets_multiple', 'for a, b in c: pass'), - ('for_star_targets_nested_starred', 'for *[*a] in b: pass'), - ('for_star_targets_starred', 'for *a in b: pass'), - ('for_star_targets_subscript_attribute', 'for a[0].b in c: pass'), - ('for_star_targets_trailing_comma', - ''' - for a, (b, c), in d: - pass - '''), - ('for_star_targets_tuple', 'for a, (b, c) in d: pass'), - ('for_underscore', - ''' - for _ in a: - pass - '''), - ('function_return_type', - ''' - def f() -> Any: - pass - '''), - ('f-string_slice', "f'{x[2]}'"), - ('f-string_slice_upper', "f'{x[2:3]}'"), - ('f-string_slice_step', "f'{x[2:3:-2]}'"), - ('f-string_constant', "f'{42}'"), - ('f-string_boolop', "f'{x and y}'"), - ('f-string_named_expr', "f'{(x:=42)}'"), - ('f-string_binop', "f'{x+y}'"), - ('f-string_unaryop', "f'{not x}'"), - ('f-string_lambda', "f'{(lambda x, /, y, y2=42 , *z, k1, k2=34, **k3: 42)}'"), - ('f-string_lambda_call', "f'{(lambda: 2)(2)}'"), - ('f-string_ifexpr', "f'{x if y else z}'"), - ('f-string_dict', "f'{ {2:34, 3:34} }'"), - ('f-string_set', "f'{ {2,-45} }'"), - ('f-string_list', "f'{ [2,-45] }'"), - ('f-string_tuple', "f'{ (2,-45) }'"), - ('f-string_listcomp', "f'{[x for x in y if z]}'"), - ('f-string_setcomp', "f'{ {x for x in y if z} }'"), - ('f-string_dictcomp', "f'{ {x:x for x in y if z} }'"), - ('f-string_genexpr', "f'{ (x for x in y if z) }'"), - ('f-string_yield', "f'{ (yield x) }'"), - ('f-string_yieldfrom', "f'{ (yield from x) }'"), - ('f-string_await', "f'{ await x }'"), - ('f-string_compare', "f'{ x == y }'"), - ('f-string_call', "f'{ f(x,y,z) }'"), - ('f-string_attribute', "f'{ f.x.y.z }'"), - ('f-string_starred', "f'{ *x, }'"), - ('f-string_doublestarred', "f'{ {**x} }'"), - ('f-string_escape_brace', "f'{{Escape'"), - ('f-string_escape_closing_brace', "f'Escape}}'"), - ('f-string_repr', "f'{a!r}'"), - ('f-string_str', "f'{a!s}'"), - ('f-string_ascii', "f'{a!a}'"), - ('f-string_debug', "f'{a=}'"), - ('f-string_padding', "f'{a:03d}'"), - ('f-string_multiline', - """ - f''' - {hello} - ''' - """), - ('f-string_multiline_in_expr', - """ - f''' - { - hello - } - ''' - """), - ('f-string_multiline_in_call', - """ - f''' - {f( - a, b, c - )} - ''' - """), - ('global', 'global a, b'), - ('group', '(yield a)'), - ('if_elif', - ''' - if a: - pass - elif b: - pass - '''), - ('if_elif_elif', - ''' - if a: - pass - elif b: - pass - elif c: - pass - '''), - ('if_elif_else', - ''' - if a: - pass - elif b: - pass - else: - pass - '''), - ('if_else', - ''' - if a: - pass - else: - pass - '''), - ('if_simple', 'if a: pass'), - ('import', 'import a'), - ('import_alias', 'import a as b'), - ('import_dotted', 'import a.b'), - ('import_dotted_alias', 'import a.b as c'), - ('import_dotted_multichar', 'import ab.cd'), - ('import_from', 'from a import b'), - ('import_from_alias', 'from a import b as c'), - ('import_from_dotted', 'from a.b import c'), - ('import_from_dotted_alias', 'from a.b import c as d'), - ('import_from_multiple_aliases', 'from a import b as c, d as e'), - ('import_from_one_dot', 'from .a import b'), - ('import_from_one_dot_alias', 'from .a import b as c'), - ('import_from_star', 'from a import *'), - ('import_from_three_dots', 'from ...a import b'), - ('import_from_trailing_comma', 'from a import (b,)'), - ('kwarg', - ''' - def f(**a): - pass - '''), - ('kwonly_args', - ''' - def f(*, a, b): - pass - '''), - ('kwonly_args_with_default', - 
''' - def f(*, a=2, b): - pass - '''), - ('lambda_kwarg', 'lambda **a: 42'), - ('lambda_kwonly_args', 'lambda *, a, b: 42'), - ('lambda_kwonly_args_with_default', 'lambda *, a=2, b: 42'), - ('lambda_mixed_args', 'lambda a, /, b, *, c: 42'), - ('lambda_mixed_args_with_default', 'lambda a, b=2, /, c=3, *e, f, **g: 42'), - ('lambda_no_args', 'lambda: 42'), - ('lambda_pos_args', 'lambda a,b: 42'), - ('lambda_pos_args_with_default', 'lambda a, b=2: 42'), - ('lambda_pos_only_args', 'lambda a, /: 42'), - ('lambda_pos_only_args_with_default', 'lambda a=0, /: 42'), - ('lambda_pos_posonly_args', 'lambda a, b, /, c, d: 42'), - ('lambda_pos_posonly_args_with_default', 'lambda a, b=0, /, c=2: 42'), - ('lambda_vararg', 'lambda *a: 42'), - ('lambda_vararg_kwonly_args', 'lambda *a, b: 42'), - ('list', '[1, 2, a]'), - ('list_comp', '[i for i in a]'), - ('list_comp_if', '[i for i in a if b]'), - ('list_trailing_comma', '[1+2, a, 3+4,]'), - ('mixed_args', - ''' - def f(a, /, b, *, c): - pass - '''), - ('mixed_args_with_default', - ''' - def f(a, b=2, /, c=3, *e, f, **g): - pass - '''), - ('multipart_string_bytes', 'b"Hola" b"Hello" b"Bye"'), - ('multipart_string_triple', '"""Something here""" "and now"'), - ('multipart_string_different_prefixes', 'u"Something" "Other thing" r"last thing"'), - ('multiple_assignments', 'x = y = z = 42'), - ('multiple_assignments_with_yield', 'x = y = z = yield 42'), - ('multiple_pass', - ''' - pass; pass - pass - '''), - ('namedexpr', '(x := [1, 2, 3])'), - ('namedexpr_false', '(x := False)'), - ('namedexpr_none', '(x := None)'), - ('namedexpr_true', '(x := True)'), - ('nonlocal', 'nonlocal a, b'), - ('number_complex', '-2.234+1j'), - ('number_float', '-34.2333'), - ('number_imaginary_literal', '1.1234j'), - ('number_integer', '-234'), - ('number_underscores', '1_234_567'), - ('pass', 'pass'), - ('pos_args', - ''' - def f(a, b): - pass - '''), - ('pos_args_with_default', - ''' - def f(a, b=2): - pass - '''), - ('pos_only_args', - ''' - def f(a, /): - pass - '''), - ('pos_only_args_with_default', - ''' - def f(a=0, /): - pass - '''), - ('pos_posonly_args', - ''' - def f(a, b, /, c, d): - pass - '''), - ('pos_posonly_args_with_default', - ''' - def f(a, b=0, /, c=2): - pass - '''), - ('primary_mixed', 'a.b.c().d[0]'), - ('raise', 'raise'), - ('raise_ellipsis', 'raise ...'), - ('raise_expr', 'raise a'), - ('raise_from', 'raise a from b'), - ('return', 'return'), - ('return_expr', 'return a'), - ('set', '{1, 2+4, 3+5}'), - ('set_comp', '{i for i in a}'), - ('set_trailing_comma', '{1, 2, 3,}'), - ('simple_assignment', 'x = 42'), - ('simple_assignment_with_yield', 'x = yield 42'), - ('string_bytes', 'b"hello"'), - ('string_concatenation_bytes', 'b"hello" b"world"'), - ('string_concatenation_simple', '"abcd" "efgh"'), - ('string_format_simple', 'f"hello"'), - ('string_format_with_formatted_value', 'f"hello {world}"'), - ('string_simple', '"hello"'), - ('string_unicode', 'u"hello"'), - ('subscript_attribute', 'a[0].b'), - ('subscript_call', 'a[b]()'), - ('subscript_multiple_slices', 'a[0:a:2, 1]'), - ('subscript_simple', 'a[0]'), - ('subscript_single_element_tuple', 'a[0,]'), - ('subscript_trailing_comma', 'a[0, 1, 2,]'), - ('subscript_tuple', 'a[0, 1, 2]'), - ('subscript_whole_slice', 'a[0+1:b:c]'), - ('try_except', - ''' - try: - pass - except: - pass - '''), - ('try_except_else', - ''' - try: - pass - except: - pass - else: - pass - '''), - ('try_except_else_finally', - ''' - try: - pass - except: - pass - else: - pass - finally: - pass - '''), - ('try_except_expr', - ''' - try: - 
pass - except a: - pass - '''), - ('try_except_expr_target', - ''' - try: - pass - except a as b: - pass - '''), - ('try_except_finally', - ''' - try: - pass - except: - pass - finally: - pass - '''), - ('try_finally', - ''' - try: - pass - finally: - pass - '''), - ('unpacking_binop', '[*([1, 2, 3] + [3, 4, 5])]'), - ('unpacking_call', '[*b()]'), - ('unpacking_compare', '[*(x < y)]'), - ('unpacking_constant', '[*3]'), - ('unpacking_dict', '[*{1: 2, 3: 4}]'), - ('unpacking_dict_comprehension', '[*{x:y for x,y in z}]'), - ('unpacking_ifexpr', '[*([1, 2, 3] if x else y)]'), - ('unpacking_list', '[*[1,2,3]]'), - ('unpacking_list_comprehension', '[*[x for x in y]]'), - ('unpacking_namedexpr', '[*(x:=[1, 2, 3])]'), - ('unpacking_set', '[*{1,2,3}]'), - ('unpacking_set_comprehension', '[*{x for x in y}]'), - ('unpacking_string', '[*"myvalue"]'), - ('unpacking_tuple', '[*(1,2,3)]'), - ('unpacking_unaryop', '[*(not [1, 2, 3])]'), - ('unpacking_yield', '[*(yield 42)]'), - ('unpacking_yieldfrom', '[*(yield from x)]'), - ('tuple', '(1, 2, 3)'), - ('vararg', - ''' - def f(*a): - pass - '''), - ('vararg_kwonly_args', - ''' - def f(*a, b): - pass - '''), - ('while', - ''' - while a: - pass - '''), - ('while_else', - ''' - while a: - pass - else: - pass - '''), - ('with', - ''' - with a: - pass - '''), - ('with_as', - ''' - with a as b: - pass - '''), - ('with_as_paren', - ''' - with a as (b): - pass - '''), - ('with_as_empty', 'with a as (): pass'), - ('with_list_recursive', - ''' - with a as [x, [y, z]]: - pass - '''), - ('with_tuple_recursive', - ''' - with a as ((x, y), z): - pass - '''), - ('with_tuple_target', - ''' - with a as (x, y): - pass - '''), - ('with_list_target', - ''' - with a as [x, y]: - pass - '''), - ('yield', 'yield'), - ('yield_expr', 'yield a'), - ('yield_from', 'yield from a'), -] - -FAIL_TEST_CASES = [ - ("annotation_multiple_targets", "(a, b): int = 42"), - ("annotation_nested_tuple", "((a, b)): int"), - ("annotation_list", "[a]: int"), - ("annotation_lambda", "lambda: int = 42"), - ("annotation_tuple", "(a,): int"), - ("annotation_tuple_without_paren", "a,: int"), - ("assignment_keyword", "a = if"), - ("augmented_assignment_list", "[a, b] += 1"), - ("augmented_assignment_tuple", "a, b += 1"), - ("augmented_assignment_tuple_paren", "(a, b) += (1, 2)"), - ("comprehension_lambda", "(a for a in lambda: b)"), - ("comprehension_else", "(a for a in b if c else d"), - ("del_call", "del a()"), - ("del_call_genexp", "del a(i for i in b)"), - ("del_subscript_call", "del a[b]()"), - ("del_attribute_call", "del a.b()"), - ("del_mixed_call", "del a[0].b().c.d()"), - ("for_star_targets_call", "for a() in b: pass"), - ("for_star_targets_subscript_call", "for a[b]() in c: pass"), - ("for_star_targets_attribute_call", "for a.b() in c: pass"), - ("for_star_targets_mixed_call", "for a[0].b().c.d() in e: pass"), - ("for_star_targets_in", "for a, in in b: pass"), - ("f-string_assignment", "f'{x = 42}'"), - ("f-string_empty", "f'{}'"), - ("f-string_function_def", "f'{def f(): pass}'"), - ("f-string_lambda", "f'{lambda x: 42}'"), - ("f-string_singe_brace", "f'{'"), - ("f-string_single_closing_brace", "f'}'"), - ("from_import_invalid", "from import import a"), - ("from_import_trailing_comma", "from a import b,"), - ("import_non_ascii_syntax_error", "import ? ?"), - # This test case checks error paths involving tokens with uninitialized - # values of col_offset and end_col_offset. 
- ("invalid indentation", - """ - def f(): - a - a - """), - ("not_terminated_string", "a = 'example"), - ("try_except_attribute_target", - """ - try: - pass - except Exception as a.b: - pass - """), - ("try_except_subscript_target", - """ - try: - pass - except Exception as a[0]: - pass - """), -] - -FAIL_SPECIALIZED_MESSAGE_CASES = [ - ("f(x, y, z=1, **b, *a", "iterable argument unpacking follows keyword argument unpacking"), - ("f(x, y=1, *z, **a, b", "positional argument follows keyword argument unpacking"), - ("f(x, y, z=1, a=2, b", "positional argument follows keyword argument"), - ("True = 1", "cannot assign to True"), - ("a() = 1", "cannot assign to function call"), - ("(a, b): int", "only single target (not tuple) can be annotated"), - ("[a, b]: int", "only single target (not list) can be annotated"), - ("a(): int", "illegal target for annotation"), - ("1 += 1", "'literal' is an illegal expression for augmented assignment"), - ("pass\n pass", "unexpected indent"), - ("def f():\npass", "expected an indented block"), - ("def f(*): pass", "named arguments must follow bare *"), - ("def f(*,): pass", "named arguments must follow bare *"), - ("def f(*, **a): pass", "named arguments must follow bare *"), - ("lambda *: pass", "named arguments must follow bare *"), - ("lambda *,: pass", "named arguments must follow bare *"), - ("lambda *, **a: pass", "named arguments must follow bare *"), - ("f(g()=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), - ("f(a, b, *c, d.e=2", "expression cannot contain assignment, perhaps you meant \"==\"?"), - ("f(*a, **b, c=0, d[1]=3)", "expression cannot contain assignment, perhaps you meant \"==\"?"), -] - -GOOD_BUT_FAIL_TEST_CASES = [ - ('string_concatenation_format', 'f"{hello} world" f"again {and_again}"'), - ('string_concatenation_multiple', - ''' - f"hello" f"{world} again" f"and_again" - '''), - ('f-string_multiline_comp', - """ - f''' - {(i for i in a - if b)} - ''' - """), -] - -FSTRINGS_TRACEBACKS = { - 'multiline_fstrings_same_line_with_brace': ( - """ - f''' - {a$b} - ''' - """, - '(a$b)', - ), - 'multiline_fstring_brace_on_next_line': ( - """ - f''' - {a$b - }''' - """, - '(a$b', - ), - 'multiline_fstring_brace_on_previous_line': ( - """ - f''' - { - a$b}''' - """, - 'a$b)', - ), -} - -EXPRESSIONS_TEST_CASES = [ - ("expression_add", "1+1"), - ("expression_add_2", "a+b"), - ("expression_call", "f(a, b=2, **kw)"), - ("expression_tuple", "1, 2, 3"), - ("expression_tuple_one_value", "1,") -] - - -def cleanup_source(source: Any) -> str: - if isinstance(source, str): - result = dedent(source) - elif not isinstance(source, (list, tuple)): - result = "\n".join(source) - else: - raise TypeError(f"Invalid type for test source: {source}") - return result - - -def prepare_test_cases( - test_cases: Iterable[Tuple[str, Union[str, Iterable[str]]]] -) -> Tuple[Iterable[str], Iterable[str]]: - - test_ids, _test_sources = zip(*test_cases) - test_sources = list(_test_sources) - for index, source in enumerate(test_sources): - result = cleanup_source(source) - test_sources[index] = result - return test_ids, test_sources - - -TEST_IDS, TEST_SOURCES = prepare_test_cases(TEST_CASES) - -GOOD_BUT_FAIL_TEST_IDS, GOOD_BUT_FAIL_SOURCES = prepare_test_cases( - GOOD_BUT_FAIL_TEST_CASES -) - -FAIL_TEST_IDS, FAIL_SOURCES = prepare_test_cases(FAIL_TEST_CASES) - -EXPRESSIONS_TEST_IDS, EXPRESSIONS_TEST_SOURCES = prepare_test_cases( - EXPRESSIONS_TEST_CASES -) - - -class ASTGenerationTest(unittest.TestCase): - def 
test_correct_ast_generation_on_source_files(self) -> None: - self.maxDiff = None - for source in TEST_SOURCES: - actual_ast = peg_parser.parse_string(source) - expected_ast = peg_parser.parse_string(source, oldparser=True) - self.assertEqual( - ast.dump(actual_ast, include_attributes=True), - ast.dump(expected_ast, include_attributes=True), - f"Wrong AST generation for source: {source}", - ) - - def test_incorrect_ast_generation_on_source_files(self) -> None: - for source in FAIL_SOURCES: - with self.assertRaises(SyntaxError, msg=f"Parsing {source} did not raise an exception"): - peg_parser.parse_string(source) - - def test_incorrect_ast_generation_with_specialized_errors(self) -> None: - for source, error_text in FAIL_SPECIALIZED_MESSAGE_CASES: - exc = IndentationError if "indent" in error_text else SyntaxError - with self.assertRaises(exc) as se: - peg_parser.parse_string(source) - self.assertTrue( - error_text in se.exception.msg, - f"Actual error message does not match expexted for {source}" - ) - - @unittest.expectedFailure - def test_correct_but_known_to_fail_ast_generation_on_source_files(self) -> None: - for source in GOOD_BUT_FAIL_SOURCES: - actual_ast = peg_parser.parse_string(source) - expected_ast = peg_parser.parse_string(source, oldparser=True) - self.assertEqual( - ast.dump(actual_ast, include_attributes=True), - ast.dump(expected_ast, include_attributes=True), - f"Wrong AST generation for source: {source}", - ) - - def test_correct_ast_generation_without_pos_info(self) -> None: - for source in GOOD_BUT_FAIL_SOURCES: - actual_ast = peg_parser.parse_string(source) - expected_ast = peg_parser.parse_string(source, oldparser=True) - self.assertEqual( - ast.dump(actual_ast), - ast.dump(expected_ast), - f"Wrong AST generation for source: {source}", - ) - - def test_fstring_parse_error_tracebacks(self) -> None: - for source, error_text in FSTRINGS_TRACEBACKS.values(): - with self.assertRaises(SyntaxError) as se: - peg_parser.parse_string(dedent(source)) - self.assertEqual(error_text, se.exception.text) - - def test_correct_ast_generatrion_eval(self) -> None: - for source in EXPRESSIONS_TEST_SOURCES: - actual_ast = peg_parser.parse_string(source, mode='eval') - expected_ast = peg_parser.parse_string(source, mode='eval', oldparser=True) - self.assertEqual( - ast.dump(actual_ast, include_attributes=True), - ast.dump(expected_ast, include_attributes=True), - f"Wrong AST generation for source: {source}", - ) - - def test_tokenizer_errors_are_propagated(self) -> None: - n=201 - with self.assertRaisesRegex(SyntaxError, "too many nested parentheses"): - peg_parser.parse_string(n*'(' + ')'*n) diff --git a/Lib/test/test_string_literals.py b/Lib/test/test_string_literals.py index 9565ee2485afd..7231970acf19d 100644 --- a/Lib/test/test_string_literals.py +++ b/Lib/test/test_string_literals.py @@ -33,7 +33,6 @@ import tempfile import unittest import warnings -from test.support import use_old_parser TEMPLATE = r"""# coding: %s @@ -168,8 +167,7 @@ def test_eval_bytes_invalid_escape(self): eval("b'''\n\\z'''") self.assertEqual(len(w), 1) self.assertEqual(w[0].filename, '') - if use_old_parser(): - self.assertEqual(w[0].lineno, 1) + self.assertEqual(w[0].lineno, 1) with warnings.catch_warnings(record=True) as w: warnings.simplefilter('error', category=DeprecationWarning) @@ -178,8 +176,7 @@ def test_eval_bytes_invalid_escape(self): exc = cm.exception self.assertEqual(w, []) self.assertEqual(exc.filename, '') - if use_old_parser(): - self.assertEqual(exc.lineno, 1) + self.assertEqual(exc.lineno, 1) 
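
The deleted test_peg_parser.py tests above validated the new PEG parser by dumping both ASTs
and comparing the resulting strings. A minimal sketch of the same comparison idea using only
the stdlib ast module, which is what remains after this change; the sample source string is
arbitrary and only for illustration:

    import ast

    source = "x = 2; y = x + 3"
    tree = ast.parse(source)   # parsed by the PEG parser, now the only parser
    # include_attributes=True adds lineno/col_offset information to the dump,
    # mirroring the comparisons made in the removed tests.
    print(ast.dump(tree, include_attributes=True))
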
def test_eval_bytes_raw(self): self.assertEqual(eval(""" br'x' """), b'x') diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 0c207ec8fc07c..6ea9a55e6f380 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -733,7 +733,6 @@ def _check_error(self, code, errtext, def test_assign_call(self): self._check_error("f() = 1", "assign") - @unittest.skipIf(support.use_old_parser(), "The old parser cannot generate these error messages") def test_assign_del(self): self._check_error("del (,)", "invalid syntax") self._check_error("del 1", "delete literal") diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py index f9a5f2fc53e1e..925a6bc32e8ea 100644 --- a/Lib/test/test_traceback.py +++ b/Lib/test/test_traceback.py @@ -655,7 +655,6 @@ def outer_raise(): self.assertIn('inner_raise() # Marker', blocks[2]) self.check_zero_div(blocks[2]) - @unittest.skipIf(support.use_old_parser(), "Pegen is arguably better here, so no need to fix this") def test_syntax_error_offset_at_eol(self): # See #10186. def e(): diff --git a/Makefile.pre.in b/Makefile.pre.in index 5972dc7b49522..684171217167b 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -300,32 +300,24 @@ LIBFFI_INCLUDEDIR= @LIBFFI_INCLUDEDIR@ # Parser PEGEN_OBJS= \ - Parser/pegen/pegen.o \ - Parser/pegen/parse.o \ - Parser/pegen/parse_string.o \ - Parser/pegen/peg_api.o + Parser/pegen.o \ + Parser/parser.o \ + Parser/string_parser.o \ + Parser/peg_api.o PEGEN_HEADERS= \ $(srcdir)/Include/internal/pegen_interface.h \ - $(srcdir)/Parser/pegen/pegen.h \ - $(srcdir)/Parser/pegen/parse_string.h + $(srcdir)/Parser/pegen.h \ + $(srcdir)/Parser/string_parser.h POBJS= \ - Parser/acceler.o \ - Parser/grammar1.o \ - Parser/listnode.o \ - Parser/node.o \ - Parser/parser.o \ Parser/token.o \ -PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) Parser/myreadline.o Parser/parsetok.o Parser/tokenizer.o +PARSER_OBJS= $(POBJS) $(PEGEN_OBJS) Parser/myreadline.o Parser/tokenizer.o PARSER_HEADERS= \ $(PEGEN_HEADERS) \ - $(srcdir)/Include/grammar.h \ - $(srcdir)/Include/parsetok.h \ - $(srcdir)/Parser/parser.h \ $(srcdir)/Parser/tokenizer.h ########################################################################## @@ -568,7 +560,7 @@ coverage-lcov: @echo # Force regeneration of parser and importlib -coverage-report: regen-grammar regen-token regen-importlib +coverage-report: regen-token regen-importlib @ # build with coverage info $(MAKE) coverage @ # run tests, ignore failures @@ -749,8 +741,8 @@ regen-importlib: Programs/_freeze_importlib ############################################################################ # Regenerate all generated files -regen-all: regen-opcode regen-opcode-targets regen-typeslots regen-grammar \ - regen-token regen-keyword regen-symbol regen-ast regen-importlib clinic \ +regen-all: regen-opcode regen-opcode-targets regen-typeslots \ + regen-token regen-symbol regen-ast regen-importlib clinic \ regen-pegen-metaparser regen-pegen ############################################################################ @@ -816,18 +808,6 @@ Python/initconfig.o: $(srcdir)/Python/initconfig.c $(IO_OBJS): $(IO_H) -.PHONY: regen-grammar -regen-grammar: regen-token - # Regenerate Include/graminit.h and Python/graminit.c - # from Grammar/Grammar using pgen - @$(MKDIR_P) Include - PYTHONPATH=$(srcdir) $(PYTHON_FOR_REGEN) -m Parser.pgen $(srcdir)/Grammar/Grammar \ - $(srcdir)/Grammar/Tokens \ - $(srcdir)/Include/graminit.h.new \ - $(srcdir)/Python/graminit.c.new - $(UPDATE_FILE) $(srcdir)/Include/graminit.h $(srcdir)/Include/graminit.h.new 
- $(UPDATE_FILE) $(srcdir)/Python/graminit.c $(srcdir)/Python/graminit.c.new - .PHONY: regen-pegen-metaparser regen-pegen-metaparser: @$(MKDIR_P) $(srcdir)/Tools/peg_generator/pegen @@ -839,12 +819,12 @@ regen-pegen-metaparser: .PHONY: regen-pegen regen-pegen: - @$(MKDIR_P) $(srcdir)/Parser/pegen + @$(MKDIR_P) $(srcdir)/Parser PYTHONPATH=$(srcdir)/Tools/peg_generator $(PYTHON_FOR_REGEN) -m pegen -q c \ $(srcdir)/Grammar/python.gram \ $(srcdir)/Grammar/Tokens \ - -o $(srcdir)/Parser/pegen/parse.new.c - $(UPDATE_FILE) $(srcdir)/Parser/pegen/parse.c $(srcdir)/Parser/pegen/parse.new.c + -o $(srcdir)/Parser/parser.new.c + $(UPDATE_FILE) $(srcdir)/Parser/parser.c $(srcdir)/Parser/parser.new.c .PHONY=regen-ast regen-ast: diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-10-11-27-15.bpo-40939.DO-wAI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-10-11-27-15.bpo-40939.DO-wAI.rst new file mode 100644 index 0000000000000..b12985d081604 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-10-11-27-15.bpo-40939.DO-wAI.rst @@ -0,0 +1 @@ +Remove the old parser, the :mod:`parser` module and all associated support code, command-line options and environment variables. Patch by Pablo Galindo. \ No newline at end of file diff --git a/Modules/Setup b/Modules/Setup index 02cfb67518df7..5d428d5b8baa7 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -134,9 +134,6 @@ faulthandler faulthandler.c # can call _PyTraceMalloc_NewReference(). _tracemalloc _tracemalloc.c -# PEG-based parser module -- slated to be *the* parser -_peg_parser _peg_parser.c - # The rest of the modules listed in this file are all commented out by # default. Usually they can be detected and built as dynamically # loaded modules by the new setup.py script added in Python 2.1. If @@ -331,10 +328,6 @@ _symtable symtablemodule.c # Helper module for various ascii-encoders #binascii binascii.c -# Fred Drake's interface to the Python parser -#parser parsermodule.c - - # Andrew Kuchling's zlib module. # This require zlib 1.1.3 (or later). 
# See http://www.gzip.org/zlib/ diff --git a/Modules/_peg_parser.c b/Modules/_peg_parser.c deleted file mode 100644 index ca2a3cf7b5fd8..0000000000000 --- a/Modules/_peg_parser.c +++ /dev/null @@ -1,153 +0,0 @@ -#include -#include "pegen_interface.h" - -static int -_mode_str_to_int(char *mode_str) -{ - int mode; - if (strcmp(mode_str, "exec") == 0) { - mode = Py_file_input; - } - else if (strcmp(mode_str, "eval") == 0) { - mode = Py_eval_input; - } - else if (strcmp(mode_str, "single") == 0) { - mode = Py_single_input; - } - else { - mode = -1; - } - return mode; -} - -static mod_ty -_run_parser(char *str, char *filename, int mode, PyCompilerFlags *flags, PyArena *arena, int oldparser) -{ - mod_ty mod; - if (!oldparser) { - mod = PyPegen_ASTFromString(str, filename, mode, flags, arena); - } - else { - mod = PyParser_ASTFromString(str, filename, mode, flags, arena); - } - return mod; -} - -PyObject * -_Py_compile_string(PyObject *self, PyObject *args, PyObject *kwds) -{ - static char *keywords[] = {"string", "filename", "mode", "oldparser", NULL}; - char *the_string; - char *filename = ""; - char *mode_str = "exec"; - int oldparser = 0; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|ssp", keywords, - &the_string, &filename, &mode_str, &oldparser)) { - return NULL; - } - - int mode = _mode_str_to_int(mode_str); - if (mode == -1) { - return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'"); - } - - PyCompilerFlags flags = _PyCompilerFlags_INIT; - flags.cf_flags = PyCF_IGNORE_COOKIE; - - PyArena *arena = PyArena_New(); - if (arena == NULL) { - return NULL; - } - - mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser); - if (mod == NULL) { - PyArena_Free(arena); - return NULL; - } - - PyObject *filename_ob = PyUnicode_DecodeFSDefault(filename); - if (filename_ob == NULL) { - PyArena_Free(arena); - return NULL; - } - PyCodeObject *result = PyAST_CompileObject(mod, filename_ob, &flags, -1, arena); - Py_XDECREF(filename_ob); - PyArena_Free(arena); - return (PyObject *)result; -} - -PyObject * -_Py_parse_string(PyObject *self, PyObject *args, PyObject *kwds) -{ - static char *keywords[] = {"string", "filename", "mode", "oldparser", "ast", NULL}; - char *the_string; - char *filename = ""; - char *mode_str = "exec"; - int oldparser = 0; - int ast = 1; - - if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|sspp", keywords, - &the_string, &filename, &mode_str, &oldparser, &ast)) { - return NULL; - } - - int mode = _mode_str_to_int(mode_str); - if (mode == -1) { - return PyErr_Format(PyExc_ValueError, "mode must be either 'exec' or 'eval' or 'single'"); - } - - PyCompilerFlags flags = _PyCompilerFlags_INIT; - flags.cf_flags = PyCF_IGNORE_COOKIE; - - PyArena *arena = PyArena_New(); - if (arena == NULL) { - return NULL; - } - - mod_ty mod = _run_parser(the_string, filename, mode, &flags, arena, oldparser); - if (mod == NULL) { - PyArena_Free(arena); - return NULL; - } - - PyObject *result; - if (ast) { - result = PyAST_mod2obj(mod); - } - else { - Py_INCREF(Py_None); - result = Py_None; - } - PyArena_Free(arena); - return result; -} - -static PyMethodDef ParseMethods[] = { - { - "parse_string", - (PyCFunction)(void (*)(void))_Py_parse_string, - METH_VARARGS|METH_KEYWORDS, - "Parse a string, return an AST." - }, - { - "compile_string", - (PyCFunction)(void (*)(void))_Py_compile_string, - METH_VARARGS|METH_KEYWORDS, - "Compile a string, return a code object." 
- }, - {NULL, NULL, 0, NULL} /* Sentinel */ -}; - -static struct PyModuleDef parsemodule = { - PyModuleDef_HEAD_INIT, - .m_name = "peg_parser", - .m_doc = "A parser.", - .m_methods = ParseMethods, -}; - -PyMODINIT_FUNC -PyInit__peg_parser(void) -{ - return PyModule_Create(&parsemodule); -} diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c deleted file mode 100644 index 24b0ffbe36a7d..0000000000000 --- a/Modules/parsermodule.c +++ /dev/null @@ -1,1222 +0,0 @@ -/* parsermodule.c - * - * Copyright 1995-1996 by Fred L. Drake, Jr. and Virginia Polytechnic - * Institute and State University, Blacksburg, Virginia, USA. - * Portions copyright 1991-1995 by Stichting Mathematisch Centrum, - * Amsterdam, The Netherlands. Copying is permitted under the terms - * associated with the main Python distribution, with the additional - * restriction that this additional notice be included and maintained - * on all distributed copies. - * - * This module serves to replace the original parser module written - * by Guido. The functionality is not matched precisely, but the - * original may be implemented on top of this. This is desirable - * since the source of the text to be parsed is now divorced from - * this interface. - * - * Unlike the prior interface, the ability to give a parse tree - * produced by Python code as a tuple to the compiler is enabled by - * this module. See the documentation for more details. - * - * I've added some annotations that help with the lint code-checking - * program, but they're not complete by a long shot. The real errors - * that lint detects are gone, but there are still warnings with - * Py_[X]DECREF() and Py_[X]INCREF() macros. The lint annotations - * look like "NOTE(...)". - * - */ - -#include "Python.h" /* general Python API */ -#include "Python-ast.h" /* mod_ty */ -#undef Yield /* undefine macro conflicting with */ -#include "ast.h" -#include "graminit.h" /* symbols defined in the grammar */ -#include "node.h" /* internal parser structure */ -#include "errcode.h" /* error codes for PyNode_*() */ -#include "token.h" /* token definitions */ - /* ISTERMINAL() / ISNONTERMINAL() */ -#include "grammar.h" -#include "parsetok.h" - -extern grammar _PyParser_Grammar; /* From graminit.c */ - -#ifdef lint -#include -#else -#define NOTE(x) -#endif - -/* String constants used to initialize module attributes. - * - */ -static const char parser_copyright_string[] = -"Copyright 1995-1996 by Virginia Polytechnic Institute & State\n\ -University, Blacksburg, Virginia, USA, and Fred L. Drake, Jr., Reston,\n\ -Virginia, USA. Portions copyright 1991-1995 by Stichting Mathematisch\n\ -Centrum, Amsterdam, The Netherlands."; - - -PyDoc_STRVAR(parser_doc_string, -"This is an interface to Python's internal parser."); - -static const char parser_version_string[] = "0.5"; - - -typedef PyObject* (*SeqMaker) (Py_ssize_t length); -typedef int (*SeqInserter) (PyObject* sequence, - Py_ssize_t index, - PyObject* element); - -/* The function below is copyrighted by Stichting Mathematisch Centrum. The - * original copyright statement is included below, and continues to apply - * in full to the function immediately following. All other material is - * original, copyrighted by Fred L. Drake, Jr. and Virginia Polytechnic - * Institute and State University. Changes were made to comply with the - * new naming conventions. Added arguments to provide support for creating - * lists as well as tuples, and optionally including the line numbers. 
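
For orientation: node2tuple() below is what backed st.totuple() and st.tolist() in the removed
parser module. A rough sketch of the observable behaviour, valid only on 3.9 and earlier where
the module still ships, mirroring the expectations in the deleted test_position above:

    import parser   # last shipped in Python 3.9; removed by this commit

    st = parser.suite("pass\n")
    st.totuple()                                # terminals are (type, string)
    st.totuple(line_info=True)                  # terminals are (type, string, lineno)
    st.totuple(line_info=True, col_info=True)   # terminals are (type, string, lineno, col)
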
- */ - - -static PyObject* -node2tuple(node *n, /* node to convert */ - SeqMaker mkseq, /* create sequence */ - SeqInserter addelem, /* func. to add elem. in seq. */ - int lineno, /* include line numbers? */ - int col_offset) /* include column offsets? */ -{ - PyObject *result = NULL, *w; - - if (n == NULL) { - Py_RETURN_NONE; - } - - if (ISNONTERMINAL(TYPE(n))) { - int i; - - result = mkseq(1 + NCH(n) + (TYPE(n) == encoding_decl)); - if (result == NULL) - goto error; - - w = PyLong_FromLong(TYPE(n)); - if (w == NULL) - goto error; - (void) addelem(result, 0, w); - - for (i = 0; i < NCH(n); i++) { - w = node2tuple(CHILD(n, i), mkseq, addelem, lineno, col_offset); - if (w == NULL) - goto error; - (void) addelem(result, i+1, w); - } - - if (TYPE(n) == encoding_decl) { - w = PyUnicode_FromString(STR(n)); - if (w == NULL) - goto error; - (void) addelem(result, i+1, w); - } - } - else if (ISTERMINAL(TYPE(n))) { - result = mkseq(2 + lineno + col_offset); - if (result == NULL) - goto error; - - w = PyLong_FromLong(TYPE(n)); - if (w == NULL) - goto error; - (void) addelem(result, 0, w); - - w = PyUnicode_FromString(STR(n)); - if (w == NULL) - goto error; - (void) addelem(result, 1, w); - - if (lineno) { - w = PyLong_FromLong(n->n_lineno); - if (w == NULL) - goto error; - (void) addelem(result, 2, w); - } - - if (col_offset) { - w = PyLong_FromLong(n->n_col_offset); - if (w == NULL) - goto error; - (void) addelem(result, 2 + lineno, w); - } - } - else { - PyErr_SetString(PyExc_SystemError, - "unrecognized parse tree node type"); - return ((PyObject*) NULL); - } - return result; - -error: - Py_XDECREF(result); - return NULL; -} -/* - * End of material copyrighted by Stichting Mathematisch Centrum. - */ - - - -/* There are two types of intermediate objects we're interested in: - * 'eval' and 'exec' types. These constants can be used in the st_type - * field of the object type to identify which any given object represents. - * These should probably go in an external header to allow other extensions - * to use them, but then, we really should be using C++ too. ;-) - */ - -#define PyST_EXPR 1 -#define PyST_SUITE 2 - - -/* These are the internal objects and definitions required to implement the - * ST type. Most of the internal names are more reminiscent of the 'old' - * naming style, but the code uses the new naming convention. - */ - -static PyObject* -parser_error = 0; - - -typedef struct { - PyObject_HEAD /* standard object header */ - node* st_node; /* the node* returned by the parser */ - int st_type; /* EXPR or SUITE ? 
*/ - PyCompilerFlags st_flags; /* Parser and compiler flags */ -} PyST_Object; - - -static void parser_free(PyST_Object *st); -static PyObject* parser_sizeof(PyST_Object *, void *); -static PyObject* parser_richcompare(PyObject *left, PyObject *right, int op); -static PyObject* parser_compilest(PyST_Object *, PyObject *, PyObject *); -static PyObject* parser_isexpr(PyST_Object *, PyObject *, PyObject *); -static PyObject* parser_issuite(PyST_Object *, PyObject *, PyObject *); -static PyObject* parser_st2list(PyST_Object *, PyObject *, PyObject *); -static PyObject* parser_st2tuple(PyST_Object *, PyObject *, PyObject *); - -#define PUBLIC_METHOD_TYPE (METH_VARARGS|METH_KEYWORDS) - -static PyMethodDef parser_methods[] = { - {"compile", (PyCFunction)(void(*)(void))parser_compilest, PUBLIC_METHOD_TYPE, - PyDoc_STR("Compile this ST object into a code object.")}, - {"isexpr", (PyCFunction)(void(*)(void))parser_isexpr, PUBLIC_METHOD_TYPE, - PyDoc_STR("Determines if this ST object was created from an expression.")}, - {"issuite", (PyCFunction)(void(*)(void))parser_issuite, PUBLIC_METHOD_TYPE, - PyDoc_STR("Determines if this ST object was created from a suite.")}, - {"tolist", (PyCFunction)(void(*)(void))parser_st2list, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates a list-tree representation of this ST.")}, - {"totuple", (PyCFunction)(void(*)(void))parser_st2tuple, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates a tuple-tree representation of this ST.")}, - {"__sizeof__", (PyCFunction)parser_sizeof, METH_NOARGS, - PyDoc_STR("Returns size in memory, in bytes.")}, - {NULL, NULL, 0, NULL} -}; - -static -PyTypeObject PyST_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "parser.st", /* tp_name */ - (int) sizeof(PyST_Object), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)parser_free, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - - /* Functions to access object as input/output buffer */ - 0, /* tp_as_buffer */ - - Py_TPFLAGS_DEFAULT, /* tp_flags */ - - /* __doc__ */ - "Intermediate representation of a Python parse tree.", - 0, /* tp_traverse */ - 0, /* tp_clear */ - parser_richcompare, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - parser_methods, /* tp_methods */ -}; /* PyST_Type */ - - -/* PyST_Type isn't subclassable, so just check ob_type */ -#define PyST_Object_Check(v) Py_IS_TYPE(v, &PyST_Type) - -static int -parser_compare_nodes(node *left, node *right) -{ - int j; - - if (TYPE(left) < TYPE(right)) - return (-1); - - if (TYPE(right) < TYPE(left)) - return (1); - - if (ISTERMINAL(TYPE(left))) - return (strcmp(STR(left), STR(right))); - - if (NCH(left) < NCH(right)) - return (-1); - - if (NCH(right) < NCH(left)) - return (1); - - for (j = 0; j < NCH(left); ++j) { - int v = parser_compare_nodes(CHILD(left, j), CHILD(right, j)); - - if (v != 0) - return (v); - } - return (0); -} - -/* parser_richcompare(PyObject* left, PyObject* right, int op) - * - * Comparison function used by the Python operators ==, !=, <, >, <=, >= - * This really just wraps a call to parser_compare_nodes() with some easy - * checks and protection code. 
- * - */ - -static PyObject * -parser_richcompare(PyObject *left, PyObject *right, int op) -{ - int result; - - /* neither argument should be NULL, unless something's gone wrong */ - if (left == NULL || right == NULL) { - PyErr_BadInternalCall(); - return NULL; - } - - /* both arguments should be instances of PyST_Object */ - if (!PyST_Object_Check(left) || !PyST_Object_Check(right)) { - Py_RETURN_NOTIMPLEMENTED; - } - - if (left == right) - /* if arguments are identical, they're equal */ - result = 0; - else - result = parser_compare_nodes(((PyST_Object *)left)->st_node, - ((PyST_Object *)right)->st_node); - - Py_RETURN_RICHCOMPARE(result, 0, op); -} - -/* parser_newstobject(node* st) - * - * Allocates a new Python object representing an ST. This is simply the - * 'wrapper' object that holds a node* and allows it to be passed around in - * Python code. - * - */ -static PyObject* -parser_newstobject(node *st, int type) -{ - PyST_Object* o = PyObject_New(PyST_Object, &PyST_Type); - - if (o != 0) { - o->st_node = st; - o->st_type = type; - o->st_flags = _PyCompilerFlags_INIT; - } - else { - PyNode_Free(st); - } - return ((PyObject*)o); -} - - -/* void parser_free(PyST_Object* st) - * - * This is called by a del statement that reduces the reference count to 0. - * - */ -static void -parser_free(PyST_Object *st) -{ - PyNode_Free(st->st_node); - PyObject_Del(st); -} - -static PyObject * -parser_sizeof(PyST_Object *st, void *unused) -{ - Py_ssize_t res; - - res = _PyObject_SIZE(Py_TYPE(st)) + _PyNode_SizeOf(st->st_node); - return PyLong_FromSsize_t(res); -} - - -/* parser_st2tuple(PyObject* self, PyObject* args, PyObject* kw) - * - * This provides conversion from a node* to a tuple object that can be - * returned to the Python-level caller. The ST object is not modified. - * - */ -static PyObject* -parser_st2tuple(PyST_Object *self, PyObject *args, PyObject *kw) -{ - int line_info = 0; - int col_info = 0; - PyObject *res = 0; - int ok; - - static char *keywords[] = {"st", "line_info", "col_info", NULL}; - - if (self == NULL || PyModule_Check(self)) { - ok = PyArg_ParseTupleAndKeywords(args, kw, "O!|pp:st2tuple", keywords, - &PyST_Type, &self, &line_info, - &col_info); - } - else - ok = PyArg_ParseTupleAndKeywords(args, kw, "|pp:totuple", &keywords[1], - &line_info, &col_info); - if (ok != 0) { - /* - * Convert ST into a tuple representation. Use Guido's function, - * since it's known to work already. - */ - res = node2tuple(((PyST_Object*)self)->st_node, - PyTuple_New, PyTuple_SetItem, line_info, col_info); - } - return (res); -} - - -/* parser_st2list(PyObject* self, PyObject* args, PyObject* kw) - * - * This provides conversion from a node* to a list object that can be - * returned to the Python-level caller. The ST object is not modified. - * - */ -static PyObject* -parser_st2list(PyST_Object *self, PyObject *args, PyObject *kw) -{ - int line_info = 0; - int col_info = 0; - PyObject *res = 0; - int ok; - - static char *keywords[] = {"st", "line_info", "col_info", NULL}; - - if (self == NULL || PyModule_Check(self)) - ok = PyArg_ParseTupleAndKeywords(args, kw, "O!|pp:st2list", keywords, - &PyST_Type, &self, &line_info, - &col_info); - else - ok = PyArg_ParseTupleAndKeywords(args, kw, "|pp:tolist", &keywords[1], - &line_info, &col_info); - if (ok) { - /* - * Convert ST into a tuple representation. Use Guido's function, - * since it's known to work already. 
- */ - res = node2tuple(self->st_node, - PyList_New, PyList_SetItem, line_info, col_info); - } - return (res); -} - - -/* parser_compilest(PyObject* self, PyObject* args) - * - * This function creates code objects from the parse tree represented by - * the passed-in data object. An optional file name is passed in as well. - * - */ -static PyObject* -parser_compilest(PyST_Object *self, PyObject *args, PyObject *kw) -{ - PyObject* res = NULL; - PyArena* arena = NULL; - mod_ty mod; - PyObject* filename = NULL; - int ok; - - static char *keywords[] = {"st", "filename", NULL}; - - if (self == NULL || PyModule_Check(self)) - ok = PyArg_ParseTupleAndKeywords(args, kw, "O!|O&:compilest", keywords, - &PyST_Type, &self, - PyUnicode_FSDecoder, &filename); - else - ok = PyArg_ParseTupleAndKeywords(args, kw, "|O&:compile", &keywords[1], - PyUnicode_FSDecoder, &filename); - if (!ok) - goto error; - - if (filename == NULL) { - filename = PyUnicode_FromString(""); - if (filename == NULL) - goto error; - } - - arena = PyArena_New(); - if (!arena) - goto error; - - mod = PyAST_FromNodeObject(self->st_node, &self->st_flags, - filename, arena); - if (!mod) - goto error; - - res = (PyObject *)PyAST_CompileObject(mod, filename, - &self->st_flags, -1, arena); -error: - Py_XDECREF(filename); - if (arena != NULL) - PyArena_Free(arena); - return res; -} - - -/* PyObject* parser_isexpr(PyObject* self, PyObject* args) - * PyObject* parser_issuite(PyObject* self, PyObject* args) - * - * Checks the passed-in ST object to determine if it is an expression or - * a statement suite, respectively. The return is a Python truth value. - * - */ -static PyObject* -parser_isexpr(PyST_Object *self, PyObject *args, PyObject *kw) -{ - PyObject* res = 0; - int ok; - - static char *keywords[] = {"st", NULL}; - - if (self == NULL || PyModule_Check(self)) - ok = PyArg_ParseTupleAndKeywords(args, kw, "O!:isexpr", keywords, - &PyST_Type, &self); - else - ok = PyArg_ParseTupleAndKeywords(args, kw, ":isexpr", &keywords[1]); - - if (ok) { - /* Check to see if the ST represents an expression or not. */ - res = (self->st_type == PyST_EXPR) ? Py_True : Py_False; - Py_INCREF(res); - } - return (res); -} - - -static PyObject* -parser_issuite(PyST_Object *self, PyObject *args, PyObject *kw) -{ - PyObject* res = 0; - int ok; - - static char *keywords[] = {"st", NULL}; - - if (self == NULL || PyModule_Check(self)) - ok = PyArg_ParseTupleAndKeywords(args, kw, "O!:issuite", keywords, - &PyST_Type, &self); - else - ok = PyArg_ParseTupleAndKeywords(args, kw, ":issuite", &keywords[1]); - - if (ok) { - /* Check to see if the ST represents an expression or not. */ - res = (self->st_type == PyST_EXPR) ? Py_False : Py_True; - Py_INCREF(res); - } - return (res); -} - - -/* err_string(const char* message) - * - * Sets the error string for an exception of type ParserError. - * - */ -static void -err_string(const char *message) -{ - PyErr_SetString(parser_error, message); -} - - -/* PyObject* parser_do_parse(PyObject* args, int type) - * - * Internal function to actually execute the parse and return the result if - * successful or set an exception if not. 
- * - */ -static PyObject* -parser_do_parse(PyObject *args, PyObject *kw, const char *argspec, int type) -{ - char* string = 0; - PyObject* res = 0; - int flags = 0; - perrdetail err; - - static char *keywords[] = {"source", NULL}; - - if (PyArg_ParseTupleAndKeywords(args, kw, argspec, keywords, &string)) { - node* n = PyParser_ParseStringFlagsFilenameEx(string, NULL, - &_PyParser_Grammar, - (type == PyST_EXPR) - ? eval_input : file_input, - &err, &flags); - - if (n) { - res = parser_newstobject(n, type); - if (res) { - ((PyST_Object *)res)->st_flags.cf_flags = flags & PyCF_MASK; - ((PyST_Object *)res)->st_flags.cf_feature_version = PY_MINOR_VERSION; - } - } - else { - PyParser_SetError(&err); - } - PyParser_ClearError(&err); - } - return (res); -} - - -/* PyObject* parser_expr(PyObject* self, PyObject* args) - * PyObject* parser_suite(PyObject* self, PyObject* args) - * - * External interfaces to the parser itself. Which is called determines if - * the parser attempts to recognize an expression ('eval' form) or statement - * suite ('exec' form). The real work is done by parser_do_parse() above. - * - */ -static PyObject* -parser_expr(PyST_Object *self, PyObject *args, PyObject *kw) -{ - NOTE(ARGUNUSED(self)) - return (parser_do_parse(args, kw, "s:expr", PyST_EXPR)); -} - - -static PyObject* -parser_suite(PyST_Object *self, PyObject *args, PyObject *kw) -{ - NOTE(ARGUNUSED(self)) - return (parser_do_parse(args, kw, "s:suite", PyST_SUITE)); -} - - - -/* This is the messy part of the code. Conversion from a tuple to an ST - * object requires that the input tuple be valid without having to rely on - * catching an exception from the compiler. This is done to allow the - * compiler itself to remain fast, since most of its input will come from - * the parser directly, and therefore be known to be syntactically correct. - * This validation is done to ensure that we don't core dump the compile - * phase, returning an exception instead. - * - * Two aspects can be broken out in this code: creating a node tree from - * the tuple passed in, and verifying that it is indeed valid. It may be - * advantageous to expand the number of ST types to include funcdefs and - * lambdadefs to take advantage of the optimizer, recognizing those STs - * here. They are not necessary, and not quite as useful in a raw form. - * For now, let's get expressions and suites working reliably. - */ - - -static node* build_node_tree(PyObject *tuple); - -static int -validate_node(node *tree) -{ - int type = TYPE(tree); - int nch = NCH(tree); - state *dfa_state; - int pos, arc; - - assert(ISNONTERMINAL(type)); - type -= NT_OFFSET; - if (type >= _PyParser_Grammar.g_ndfas) { - PyErr_Format(parser_error, "Unrecognized node type %d.", TYPE(tree)); - return 0; - } - const dfa *nt_dfa = &_PyParser_Grammar.g_dfa[type]; - REQ(tree, nt_dfa->d_type); - - /* Run the DFA for this nonterminal. */ - dfa_state = nt_dfa->d_state; - for (pos = 0; pos < nch; ++pos) { - node *ch = CHILD(tree, pos); - int ch_type = TYPE(ch); - if ((ch_type >= NT_OFFSET + _PyParser_Grammar.g_ndfas) - || (ISTERMINAL(ch_type) && (ch_type >= N_TOKENS)) - || (ch_type < 0) - ) { - PyErr_Format(parser_error, "Unrecognized node type %d.", ch_type); - return 0; - } - if (ch_type == suite && TYPE(tree) == funcdef) { - /* This is the opposite hack of what we do in parser.c - (search for func_body_suite), except we don't ever - support type comments here. 
*/ - ch_type = func_body_suite; - } - for (arc = 0; arc < dfa_state->s_narcs; ++arc) { - short a_label = dfa_state->s_arc[arc].a_lbl; - assert(a_label < _PyParser_Grammar.g_ll.ll_nlabels); - - const char *label_str = _PyParser_Grammar.g_ll.ll_label[a_label].lb_str; - if ((_PyParser_Grammar.g_ll.ll_label[a_label].lb_type == ch_type) - && ((ch->n_str == NULL) || (label_str == NULL) - || (strcmp(ch->n_str, label_str) == 0)) - ) { - /* The child is acceptable; if non-terminal, validate it recursively. */ - if (ISNONTERMINAL(ch_type) && !validate_node(ch)) - return 0; - - /* Update the state, and move on to the next child. */ - dfa_state = &nt_dfa->d_state[dfa_state->s_arc[arc].a_arrow]; - goto arc_found; - } - } - /* What would this state have accepted? */ - { - short a_label = dfa_state->s_arc->a_lbl; - if (!a_label) /* Wouldn't accept any more children */ - goto illegal_num_children; - - int next_type = _PyParser_Grammar.g_ll.ll_label[a_label].lb_type; - const char *expected_str = _PyParser_Grammar.g_ll.ll_label[a_label].lb_str; - - if (ISNONTERMINAL(next_type)) { - PyErr_Format(parser_error, "Expected %s, got %s.", - _PyParser_Grammar.g_dfa[next_type - NT_OFFSET].d_name, - ISTERMINAL(ch_type) ? _PyParser_TokenNames[ch_type] : - _PyParser_Grammar.g_dfa[ch_type - NT_OFFSET].d_name); - } - else if (expected_str != NULL) { - PyErr_Format(parser_error, "Illegal terminal: expected '%s'.", - expected_str); - } - else { - PyErr_Format(parser_error, "Illegal terminal: expected %s.", - _PyParser_TokenNames[next_type]); - } - return 0; - } - -arc_found: - continue; - } - /* Are we in a final state? If so, return 1 for successful validation. */ - for (arc = 0; arc < dfa_state->s_narcs; ++arc) { - if (!dfa_state->s_arc[arc].a_lbl) { - return 1; - } - } - -illegal_num_children: - PyErr_Format(parser_error, - "Illegal number of children for %s node.", nt_dfa->d_name); - return 0; -} - -/* PyObject* parser_tuple2st(PyObject* self, PyObject* args) - * - * This is the public function, called from the Python code. It receives a - * single tuple object from the caller, and creates an ST object if the - * tuple can be validated. It does this by checking the first code of the - * tuple, and, if acceptable, builds the internal representation. If this - * step succeeds, the internal representation is validated as fully as - * possible with the recursive validate_node() routine defined above. - * - * This function must be changed if support is to be added for PyST_FRAGMENT - * ST objects. - * - */ -static PyObject* -parser_tuple2st(PyST_Object *self, PyObject *args, PyObject *kw) -{ - NOTE(ARGUNUSED(self)) - PyObject *st = 0; - PyObject *tuple; - node *tree; - - static char *keywords[] = {"sequence", NULL}; - - if (!PyArg_ParseTupleAndKeywords(args, kw, "O:sequence2st", keywords, - &tuple)) - return (0); - if (!PySequence_Check(tuple)) { - PyErr_SetString(PyExc_ValueError, - "sequence2st() requires a single sequence argument"); - return (0); - } - /* - * Convert the tree to the internal form before checking it. - */ - tree = build_node_tree(tuple); - if (tree != 0) { - node *validation_root = NULL; - int tree_type = 0; - switch (TYPE(tree)) { - case eval_input: - /* Might be an eval form. */ - tree_type = PyST_EXPR; - validation_root = tree; - break; - case encoding_decl: - /* This looks like an encoding_decl so far. 
*/ - if (NCH(tree) == 1) { - tree_type = PyST_SUITE; - validation_root = CHILD(tree, 0); - } - else { - err_string("Error Parsing encoding_decl"); - } - break; - case file_input: - /* This looks like an exec form so far. */ - tree_type = PyST_SUITE; - validation_root = tree; - break; - default: - /* This is a fragment, at best. */ - err_string("parse tree does not use a valid start symbol"); - } - - if (validation_root != NULL && validate_node(validation_root)) - st = parser_newstobject(tree, tree_type); - else - PyNode_Free(tree); - } - /* Make sure we raise an exception on all errors. We should never - * get this, but we'd do well to be sure something is done. - */ - if (st == NULL && !PyErr_Occurred()) - err_string("unspecified ST error occurred"); - - return st; -} - - -/* node* build_node_children() - * - * Iterate across the children of the current non-terminal node and build - * their structures. If successful, return the root of this portion of - * the tree, otherwise, 0. Any required exception will be specified already, - * and no memory will have been deallocated. - * - */ -static node* -build_node_children(PyObject *tuple, node *root, int *line_num) -{ - Py_ssize_t len = PyObject_Size(tuple); - Py_ssize_t i; - int err; - - if (len < 0) { - return NULL; - } - for (i = 1; i < len; ++i) { - /* elem must always be a sequence, however simple */ - PyObject* elem = PySequence_GetItem(tuple, i); - int ok = elem != NULL; - int type = 0; - char *strn = 0; - - if (ok) - ok = PySequence_Check(elem); - if (ok) { - PyObject *temp = PySequence_GetItem(elem, 0); - if (temp == NULL) - ok = 0; - else { - ok = PyLong_Check(temp); - if (ok) { - type = _PyLong_AsInt(temp); - if (type == -1 && PyErr_Occurred()) { - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - } - Py_DECREF(temp); - } - } - if (!ok) { - PyObject *err = Py_BuildValue("Os", elem, - "Illegal node construct."); - PyErr_SetObject(parser_error, err); - Py_XDECREF(err); - Py_XDECREF(elem); - return NULL; - } - if (ISTERMINAL(type)) { - Py_ssize_t len = PyObject_Size(elem); - PyObject *temp; - const char *temp_str; - - if ((len != 2) && (len != 3)) { - err_string("terminal nodes must have 2 or 3 entries"); - Py_DECREF(elem); - return NULL; - } - temp = PySequence_GetItem(elem, 1); - if (temp == NULL) { - Py_DECREF(elem); - return NULL; - } - if (!PyUnicode_Check(temp)) { - PyErr_Format(parser_error, - "second item in terminal node must be a string," - " found %s", - Py_TYPE(temp)->tp_name); - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - if (len == 3) { - PyObject *o = PySequence_GetItem(elem, 2); - if (o == NULL) { - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - if (PyLong_Check(o)) { - int num = _PyLong_AsInt(o); - if (num == -1 && PyErr_Occurred()) { - Py_DECREF(o); - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - *line_num = num; - } - else { - PyErr_Format(parser_error, - "third item in terminal node must be an" - " integer, found %s", - Py_TYPE(temp)->tp_name); - Py_DECREF(o); - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - Py_DECREF(o); - } - temp_str = PyUnicode_AsUTF8AndSize(temp, &len); - if (temp_str == NULL) { - Py_DECREF(temp); - Py_DECREF(elem); - return NULL; - } - strn = (char *)PyObject_MALLOC(len + 1); - if (strn == NULL) { - Py_DECREF(temp); - Py_DECREF(elem); - PyErr_NoMemory(); - return NULL; - } - (void) memcpy(strn, temp_str, len + 1); - Py_DECREF(temp); - } - else if (!ISNONTERMINAL(type)) { - /* - * It has to be one or the other; this is an error. 
- * Raise an exception. - */ - PyObject *err = Py_BuildValue("Os", elem, "unknown node type."); - PyErr_SetObject(parser_error, err); - Py_XDECREF(err); - Py_DECREF(elem); - return NULL; - } - err = PyNode_AddChild(root, type, strn, *line_num, 0, *line_num, 0); - if (err == E_NOMEM) { - Py_DECREF(elem); - PyObject_FREE(strn); - PyErr_NoMemory(); - return NULL; - } - if (err == E_OVERFLOW) { - Py_DECREF(elem); - PyObject_FREE(strn); - PyErr_SetString(PyExc_ValueError, - "unsupported number of child nodes"); - return NULL; - } - - if (ISNONTERMINAL(type)) { - node* new_child = CHILD(root, i - 1); - - if (new_child != build_node_children(elem, new_child, line_num)) { - Py_DECREF(elem); - return NULL; - } - } - else if (type == NEWLINE) { /* It's true: we increment the */ - ++(*line_num); /* line number *after* the newline! */ - } - Py_DECREF(elem); - } - return root; -} - - -static node* -build_node_tree(PyObject *tuple) -{ - node* res = 0; - PyObject *temp = PySequence_GetItem(tuple, 0); - long num = -1; - - if (temp != NULL) - num = PyLong_AsLong(temp); - Py_XDECREF(temp); - if (ISTERMINAL(num)) { - /* - * The tuple is simple, but it doesn't start with a start symbol. - * Raise an exception now and be done with it. - */ - tuple = Py_BuildValue("Os", tuple, - "Illegal syntax-tree; cannot start with terminal symbol."); - PyErr_SetObject(parser_error, tuple); - Py_XDECREF(tuple); - } - else if (ISNONTERMINAL(num)) { - /* - * Not efficient, but that can be handled later. - */ - int line_num = 0; - PyObject *encoding = NULL; - - if (num == encoding_decl) { - encoding = PySequence_GetItem(tuple, 2); - if (encoding == NULL) { - PyErr_SetString(parser_error, "missed encoding"); - return NULL; - } - if (!PyUnicode_Check(encoding)) { - PyErr_Format(parser_error, - "encoding must be a string, found %.200s", - Py_TYPE(encoding)->tp_name); - Py_DECREF(encoding); - return NULL; - } - /* tuple isn't borrowed anymore here, need to DECREF */ - tuple = PySequence_GetSlice(tuple, 0, 2); - if (tuple == NULL) { - Py_DECREF(encoding); - return NULL; - } - } - res = PyNode_New(num); - if (res != NULL) { - if (res != build_node_children(tuple, res, &line_num)) { - PyNode_Free(res); - res = NULL; - } - if (res && encoding) { - Py_ssize_t len; - const char *temp; - temp = PyUnicode_AsUTF8AndSize(encoding, &len); - if (temp == NULL) { - PyNode_Free(res); - Py_DECREF(encoding); - Py_DECREF(tuple); - return NULL; - } - res->n_str = (char *)PyObject_MALLOC(len + 1); - if (res->n_str == NULL) { - PyNode_Free(res); - Py_DECREF(encoding); - Py_DECREF(tuple); - PyErr_NoMemory(); - return NULL; - } - (void) memcpy(res->n_str, temp, len + 1); - } - } - if (encoding != NULL) { - Py_DECREF(encoding); - Py_DECREF(tuple); - } - } - else { - /* The tuple is illegal -- if the number is neither TERMINAL nor - * NONTERMINAL, we can't use it. Not sure the implementation - * allows this condition, but the API doesn't preclude it. 
- */ - PyObject *err = Py_BuildValue("Os", tuple, - "Illegal component tuple."); - PyErr_SetObject(parser_error, err); - Py_XDECREF(err); - } - - return (res); -} - - -static PyObject* -pickle_constructor = NULL; - - -static PyObject* -parser__pickler(PyObject *self, PyObject *args) -{ - NOTE(ARGUNUSED(self)) - PyObject *result = NULL; - PyObject *st = NULL; - - if (PyArg_ParseTuple(args, "O!:_pickler", &PyST_Type, &st)) { - PyObject *newargs; - PyObject *tuple; - - if ((newargs = PyTuple_Pack(2, st, Py_True)) == NULL) - return NULL; - tuple = parser_st2tuple((PyST_Object*)NULL, newargs, NULL); - if (tuple != NULL) { - result = Py_BuildValue("O(O)", pickle_constructor, tuple); - Py_DECREF(tuple); - } - Py_DECREF(newargs); - } - - return (result); -} - - -/* Functions exported by this module. Most of this should probably - * be converted into an ST object with methods, but that is better - * done directly in Python, allowing subclasses to be created directly. - * We'd really have to write a wrapper around it all anyway to allow - * inheritance. - */ -static PyMethodDef parser_functions[] = { - {"compilest", (PyCFunction)(void(*)(void))parser_compilest, PUBLIC_METHOD_TYPE, - PyDoc_STR("Compiles an ST object into a code object.")}, - {"expr", (PyCFunction)(void(*)(void))parser_expr, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates an ST object from an expression.")}, - {"isexpr", (PyCFunction)(void(*)(void))parser_isexpr, PUBLIC_METHOD_TYPE, - PyDoc_STR("Determines if an ST object was created from an expression.")}, - {"issuite", (PyCFunction)(void(*)(void))parser_issuite, PUBLIC_METHOD_TYPE, - PyDoc_STR("Determines if an ST object was created from a suite.")}, - {"suite", (PyCFunction)(void(*)(void))parser_suite, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates an ST object from a suite.")}, - {"sequence2st", (PyCFunction)(void(*)(void))parser_tuple2st, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates an ST object from a tree representation.")}, - {"st2tuple", (PyCFunction)(void(*)(void))parser_st2tuple, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates a tuple-tree representation of an ST.")}, - {"st2list", (PyCFunction)(void(*)(void))parser_st2list, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates a list-tree representation of an ST.")}, - {"tuple2st", (PyCFunction)(void(*)(void))parser_tuple2st, PUBLIC_METHOD_TYPE, - PyDoc_STR("Creates an ST object from a tree representation.")}, - - /* private stuff: support pickle module */ - {"_pickler", (PyCFunction)parser__pickler, METH_VARARGS, - PyDoc_STR("Returns the pickle magic to allow ST objects to be pickled.")}, - - {NULL, NULL, 0, NULL} - }; - - - -static struct PyModuleDef parsermodule = { - PyModuleDef_HEAD_INIT, - "parser", - NULL, - -1, - parser_functions, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC PyInit_parser(void); /* supply a prototype */ - -PyMODINIT_FUNC -PyInit_parser(void) -{ - PyObject *module, *copyreg; - - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "The parser module is deprecated and will be removed " - "in future versions of Python", 7) != 0) { - return NULL; - } - - if (PyType_Ready(&PyST_Type) < 0) - return NULL; - module = PyModule_Create(&parsermodule); - if (module == NULL) - return NULL; - - if (parser_error == 0) - parser_error = PyErr_NewException("parser.ParserError", NULL, NULL); - - if (parser_error == 0) - return NULL; - /* CAUTION: The code next used to skip bumping the refcount on - * parser_error. That's a disaster if PyInit_parser() gets called more - * than once. 
By incref'ing, we ensure that each module dict that - * gets created owns its reference to the shared parser_error object, - * and the file static parser_error variable owns a reference too. - */ - Py_INCREF(parser_error); - if (PyModule_AddObject(module, "ParserError", parser_error) != 0) - return NULL; - - Py_INCREF(&PyST_Type); - PyModule_AddObject(module, "STType", (PyObject*)&PyST_Type); - - PyModule_AddStringConstant(module, "__copyright__", - parser_copyright_string); - PyModule_AddStringConstant(module, "__doc__", - parser_doc_string); - PyModule_AddStringConstant(module, "__version__", - parser_version_string); - - /* Register to support pickling. - * If this fails, the import of this module will fail because an - * exception will be raised here; should we clear the exception? - */ - copyreg = PyImport_ImportModuleNoBlock("copyreg"); - if (copyreg != NULL) { - PyObject *func, *pickler; - _Py_IDENTIFIER(pickle); - _Py_IDENTIFIER(sequence2st); - _Py_IDENTIFIER(_pickler); - - func = _PyObject_GetAttrId(copyreg, &PyId_pickle); - pickle_constructor = _PyObject_GetAttrId(module, &PyId_sequence2st); - pickler = _PyObject_GetAttrId(module, &PyId__pickler); - Py_XINCREF(pickle_constructor); - if ((func != NULL) && (pickle_constructor != NULL) - && (pickler != NULL)) { - PyObject *res; - - res = PyObject_CallFunctionObjArgs(func, &PyST_Type, pickler, - pickle_constructor, NULL); - Py_XDECREF(res); - } - Py_XDECREF(func); - Py_XDECREF(pickle_constructor); - Py_XDECREF(pickler); - Py_DECREF(copyreg); - } - return module; -} diff --git a/PC/config.c b/PC/config.c index 32af2a81aeb41..87cd76d37bede 100644 --- a/PC/config.c +++ b/PC/config.c @@ -45,7 +45,6 @@ extern PyObject* PyInit__symtable(void); extern PyObject* PyInit_mmap(void); extern PyObject* PyInit__csv(void); extern PyObject* PyInit__sre(void); -extern PyObject* PyInit_parser(void); extern PyObject* PyInit_winreg(void); extern PyObject* PyInit__struct(void); extern PyObject* PyInit__datetime(void); @@ -75,7 +74,6 @@ extern PyObject* PyInit__opcode(void); extern PyObject* PyInit__contextvars(void); -extern PyObject* PyInit__peg_parser(void); /* tools/freeze/makeconfig.py marker for additional "extern" */ /* -- ADDMODULE MARKER 1 -- */ @@ -127,7 +125,6 @@ struct _inittab _PyImport_Inittab[] = { {"mmap", PyInit_mmap}, {"_csv", PyInit__csv}, {"_sre", PyInit__sre}, - {"parser", PyInit_parser}, {"winreg", PyInit_winreg}, {"_struct", PyInit__struct}, {"_datetime", PyInit__datetime}, @@ -171,7 +168,6 @@ struct _inittab _PyImport_Inittab[] = { {"_opcode", PyInit__opcode}, {"_contextvars", PyInit__contextvars}, - {"_peg_parser", PyInit__peg_parser}, /* Sentinel */ {0, 0} diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 8d5f99f8336a3..2653ce9111d18 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -279,10 +279,9 @@ @@ -343,8 +342,6 @@ @@ -417,19 +414,13 @@ diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 7bc9f8f166456..6ea7afaa03681 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -522,9 +522,6 @@ @@ -908,12 +905,6 @@ @@ -926,21 +917,9 @@ diff --git a/PCbuild/regen.vcxproj b/PCbuild/regen.vcxproj index 564a4dd71188c..ea246ffc8ff25 100644 --- a/PCbuild/regen.vcxproj +++ b/PCbuild/regen.vcxproj @@ -155,22 +155,11 @@ - - - - - - - - - - 
- - + - - + + diff --git a/Parser/acceler.c b/Parser/acceler.c deleted file mode 100644 index e515833e1dda1..0000000000000 --- a/Parser/acceler.c +++ /dev/null @@ -1,123 +0,0 @@ - -/* Parser accelerator module */ - -/* The parser as originally conceived had disappointing performance. - This module does some precomputation that speeds up the selection - of a DFA based upon a token, turning a search through an array - into a simple indexing operation. The parser now cannot work - without the accelerators installed. Note that the accelerators - are installed dynamically when the parser is initialized, they - are not part of the static data structure written on graminit.[ch] - by the parser generator. */ - -#include "Python.h" -#include "grammar.h" -#include "node.h" -#include "token.h" -#include "parser.h" - -/* Forward references */ -static void fixdfa(grammar *, const dfa *); -static void fixstate(grammar *, state *); - -void -PyGrammar_AddAccelerators(grammar *g) -{ - int i; - const dfa *d = g->g_dfa; - for (i = g->g_ndfas; --i >= 0; d++) - fixdfa(g, d); - g->g_accel = 1; -} - -void -PyGrammar_RemoveAccelerators(grammar *g) -{ - int i; - g->g_accel = 0; - const dfa *d = g->g_dfa; - for (i = g->g_ndfas; --i >= 0; d++) { - state *s; - int j; - s = d->d_state; - for (j = 0; j < d->d_nstates; j++, s++) { - if (s->s_accel) - PyObject_FREE(s->s_accel); - s->s_accel = NULL; - } - } -} - -static void -fixdfa(grammar *g, const dfa *d) -{ - state *s; - int j; - s = d->d_state; - for (j = 0; j < d->d_nstates; j++, s++) - fixstate(g, s); -} - -static void -fixstate(grammar *g, state *s) -{ - const arc *a; - int k; - int *accel; - int nl = g->g_ll.ll_nlabels; - s->s_accept = 0; - accel = (int *) PyObject_MALLOC(nl * sizeof(int)); - if (accel == NULL) { - fprintf(stderr, "no mem to build parser accelerators\n"); - exit(1); - } - for (k = 0; k < nl; k++) - accel[k] = -1; - a = s->s_arc; - for (k = s->s_narcs; --k >= 0; a++) { - int lbl = a->a_lbl; - const label *l = &g->g_ll.ll_label[lbl]; - int type = l->lb_type; - if (a->a_arrow >= (1 << 7)) { - printf("XXX too many states!\n"); - continue; - } - if (ISNONTERMINAL(type)) { - const dfa *d1 = PyGrammar_FindDFA(g, type); - int ibit; - if (type - NT_OFFSET >= (1 << 7)) { - printf("XXX too high nonterminal number!\n"); - continue; - } - for (ibit = 0; ibit < g->g_ll.ll_nlabels; ibit++) { - if (testbit(d1->d_first, ibit)) { - if (accel[ibit] != -1) - printf("XXX ambiguity!\n"); - accel[ibit] = a->a_arrow | (1 << 7) | - ((type - NT_OFFSET) << 8); - } - } - } - else if (lbl == EMPTY) - s->s_accept = 1; - else if (lbl >= 0 && lbl < nl) - accel[lbl] = a->a_arrow; - } - while (nl > 0 && accel[nl-1] == -1) - nl--; - for (k = 0; k < nl && accel[k] == -1;) - k++; - if (k < nl) { - int i; - s->s_accel = (int *) PyObject_MALLOC((nl-k) * sizeof(int)); - if (s->s_accel == NULL) { - fprintf(stderr, "no mem to add parser accelerators\n"); - exit(1); - } - s->s_lower = k; - s->s_upper = nl; - for (i = 0; k < nl; i++, k++) - s->s_accel[i] = accel[k]; - } - PyObject_FREE(accel); -} diff --git a/Parser/grammar1.c b/Parser/grammar1.c deleted file mode 100644 index c702040efdfab..0000000000000 --- a/Parser/grammar1.c +++ /dev/null @@ -1,47 +0,0 @@ - -/* Grammar subroutines needed by parser */ - -#include "Python.h" -#include "grammar.h" -#include "token.h" - -/* Return the DFA for the given type */ - -const dfa * -PyGrammar_FindDFA(grammar *g, int type) -{ - /* Massive speed-up */ - const dfa *d = &g->g_dfa[type - NT_OFFSET]; - assert(d->d_type == type); - return d; -} - -const 
char * -PyGrammar_LabelRepr(label *lb) -{ - static char buf[100]; - - if (lb->lb_type == ENDMARKER) - return "EMPTY"; - else if (ISNONTERMINAL(lb->lb_type)) { - if (lb->lb_str == NULL) { - PyOS_snprintf(buf, sizeof(buf), "NT%d", lb->lb_type); - return buf; - } - else - return lb->lb_str; - } - else if (lb->lb_type < N_TOKENS) { - if (lb->lb_str == NULL) - return _PyParser_TokenNames[lb->lb_type]; - else { - PyOS_snprintf(buf, sizeof(buf), "%.32s(%.32s)", - _PyParser_TokenNames[lb->lb_type], lb->lb_str); - return buf; - } - } - else { - Py_FatalError("invalid grammar label"); - return NULL; - } -} diff --git a/Parser/listnode.c b/Parser/listnode.c deleted file mode 100644 index 41e7a033a1fa6..0000000000000 --- a/Parser/listnode.c +++ /dev/null @@ -1,71 +0,0 @@ - -/* List a node on a file */ - -#include "Python.h" -#include "pycore_interp.h" // PyInterpreterState.parser -#include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "token.h" -#include "node.h" - -/* Forward */ -static void list1node(FILE *, node *); -static void listnode(FILE *, node *); - -void -PyNode_ListTree(node *n) -{ - listnode(stdout, n); -} - -static void -listnode(FILE *fp, node *n) -{ - PyInterpreterState *interp = _PyInterpreterState_GET(); - - interp->parser.listnode.level = 0; - interp->parser.listnode.atbol = 1; - list1node(fp, n); -} - -static void -list1node(FILE *fp, node *n) -{ - if (n == NULL) - return; - if (ISNONTERMINAL(TYPE(n))) { - int i; - for (i = 0; i < NCH(n); i++) - list1node(fp, CHILD(n, i)); - } - else if (ISTERMINAL(TYPE(n))) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_parser_state *parser = &interp->parser; - switch (TYPE(n)) { - case INDENT: - parser->listnode.level++; - break; - case DEDENT: - parser->listnode.level--; - break; - default: - if (parser->listnode.atbol) { - int i; - for (i = 0; i < parser->listnode.level; ++i) { - fprintf(fp, "\t"); - } - parser->listnode.atbol = 0; - } - if (TYPE(n) == NEWLINE) { - if (STR(n) != NULL) - fprintf(fp, "%s", STR(n)); - fprintf(fp, "\n"); - parser->listnode.atbol = 1; - } - else - fprintf(fp, "%s ", STR(n)); - break; - } - } - else - fprintf(fp, "? 
"); -} diff --git a/Parser/parser.c b/Parser/parser.c index a61b2f5ebf7a1..d28e6c83aadb0 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -1,463 +1,24415 @@ +// @generated by pegen.py from ./Grammar/python.gram +#include "pegen.h" -/* Parser implementation */ +#if defined(Py_DEBUG) && defined(Py_BUILD_CORE) +extern int Py_DebugFlag; +#define D(x) if (Py_DebugFlag) x; +#else +#define D(x) +#endif +static const int n_keyword_lists = 9; +static KeywordToken *reserved_keywords[] = { + NULL, + NULL, + (KeywordToken[]) { + {"if", 510}, + {"in", 518}, + {"is", 526}, + {"as", 530}, + {"or", 531}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"del", 503}, + {"try", 511}, + {"for", 517}, + {"def", 522}, + {"not", 525}, + {"and", 532}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"pass", 502}, + {"from", 514}, + {"elif", 515}, + {"else", 516}, + {"with", 519}, + {"True", 527}, + {"None", 529}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"raise", 501}, + {"yield", 504}, + {"break", 506}, + {"while", 512}, + {"class", 523}, + {"False", 528}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"return", 500}, + {"assert", 505}, + {"global", 508}, + {"import", 513}, + {"except", 520}, + {"lambda", 524}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"finally", 521}, + {NULL, -1}, + }, + (KeywordToken[]) { + {"continue", 507}, + {"nonlocal", 509}, + {NULL, -1}, + }, +}; +#define file_type 1000 +#define interactive_type 1001 +#define eval_type 1002 +#define func_type_type 1003 +#define fstring_type 1004 +#define type_expressions_type 1005 +#define statements_type 1006 +#define statement_type 1007 +#define statement_newline_type 1008 +#define simple_stmt_type 1009 +#define small_stmt_type 1010 +#define compound_stmt_type 1011 +#define assignment_type 1012 +#define augassign_type 1013 +#define global_stmt_type 1014 +#define nonlocal_stmt_type 1015 +#define yield_stmt_type 1016 +#define assert_stmt_type 1017 +#define del_stmt_type 1018 +#define import_stmt_type 1019 +#define import_name_type 1020 +#define import_from_type 1021 +#define import_from_targets_type 1022 +#define import_from_as_names_type 1023 +#define import_from_as_name_type 1024 +#define dotted_as_names_type 1025 +#define dotted_as_name_type 1026 +#define dotted_name_type 1027 // Left-recursive +#define if_stmt_type 1028 +#define elif_stmt_type 1029 +#define else_block_type 1030 +#define while_stmt_type 1031 +#define for_stmt_type 1032 +#define with_stmt_type 1033 +#define with_item_type 1034 +#define try_stmt_type 1035 +#define except_block_type 1036 +#define finally_block_type 1037 +#define return_stmt_type 1038 +#define raise_stmt_type 1039 +#define function_def_type 1040 +#define function_def_raw_type 1041 +#define func_type_comment_type 1042 +#define params_type 1043 +#define parameters_type 1044 +#define slash_no_default_type 1045 +#define slash_with_default_type 1046 +#define star_etc_type 1047 +#define kwds_type 1048 +#define param_no_default_type 1049 +#define param_with_default_type 1050 +#define param_maybe_default_type 1051 +#define param_type 1052 +#define annotation_type 1053 +#define default_type 1054 +#define decorators_type 1055 +#define class_def_type 1056 +#define class_def_raw_type 1057 +#define block_type 1058 +#define expressions_list_type 1059 +#define star_expressions_type 1060 +#define star_expression_type 1061 +#define star_named_expressions_type 1062 +#define star_named_expression_type 1063 +#define named_expression_type 1064 +#define annotated_rhs_type 1065 +#define expressions_type 1066 +#define expression_type 1067 +#define 
lambdef_type 1068 +#define lambda_params_type 1069 +#define lambda_parameters_type 1070 +#define lambda_slash_no_default_type 1071 +#define lambda_slash_with_default_type 1072 +#define lambda_star_etc_type 1073 +#define lambda_kwds_type 1074 +#define lambda_param_no_default_type 1075 +#define lambda_param_with_default_type 1076 +#define lambda_param_maybe_default_type 1077 +#define lambda_param_type 1078 +#define disjunction_type 1079 +#define conjunction_type 1080 +#define inversion_type 1081 +#define comparison_type 1082 +#define compare_op_bitwise_or_pair_type 1083 +#define eq_bitwise_or_type 1084 +#define noteq_bitwise_or_type 1085 +#define lte_bitwise_or_type 1086 +#define lt_bitwise_or_type 1087 +#define gte_bitwise_or_type 1088 +#define gt_bitwise_or_type 1089 +#define notin_bitwise_or_type 1090 +#define in_bitwise_or_type 1091 +#define isnot_bitwise_or_type 1092 +#define is_bitwise_or_type 1093 +#define bitwise_or_type 1094 // Left-recursive +#define bitwise_xor_type 1095 // Left-recursive +#define bitwise_and_type 1096 // Left-recursive +#define shift_expr_type 1097 // Left-recursive +#define sum_type 1098 // Left-recursive +#define term_type 1099 // Left-recursive +#define factor_type 1100 +#define power_type 1101 +#define await_primary_type 1102 +#define primary_type 1103 // Left-recursive +#define slices_type 1104 +#define slice_type 1105 +#define atom_type 1106 +#define strings_type 1107 +#define list_type 1108 +#define listcomp_type 1109 +#define tuple_type 1110 +#define group_type 1111 +#define genexp_type 1112 +#define set_type 1113 +#define setcomp_type 1114 +#define dict_type 1115 +#define dictcomp_type 1116 +#define double_starred_kvpairs_type 1117 +#define double_starred_kvpair_type 1118 +#define kvpair_type 1119 +#define for_if_clauses_type 1120 +#define for_if_clause_type 1121 +#define yield_expr_type 1122 +#define arguments_type 1123 +#define args_type 1124 +#define kwargs_type 1125 +#define starred_expression_type 1126 +#define kwarg_or_starred_type 1127 +#define kwarg_or_double_starred_type 1128 +#define star_targets_type 1129 +#define star_targets_seq_type 1130 +#define star_target_type 1131 +#define star_atom_type 1132 +#define single_target_type 1133 +#define single_subscript_attribute_target_type 1134 +#define del_targets_type 1135 +#define del_target_type 1136 +#define del_t_atom_type 1137 +#define del_target_end_type 1138 +#define targets_type 1139 +#define target_type 1140 +#define t_primary_type 1141 // Left-recursive +#define t_lookahead_type 1142 +#define t_atom_type 1143 +#define incorrect_arguments_type 1144 +#define invalid_kwarg_type 1145 +#define invalid_named_expression_type 1146 +#define invalid_assignment_type 1147 +#define invalid_block_type 1148 +#define invalid_comprehension_type 1149 +#define invalid_dict_comprehension_type 1150 +#define invalid_parameters_type 1151 +#define invalid_lambda_parameters_type 1152 +#define invalid_star_etc_type 1153 +#define invalid_lambda_star_etc_type 1154 +#define invalid_double_type_comments_type 1155 +#define invalid_del_target_type 1156 +#define invalid_import_from_targets_type 1157 +#define _loop0_1_type 1158 +#define _loop0_2_type 1159 +#define _loop0_4_type 1160 +#define _gather_3_type 1161 +#define _loop0_6_type 1162 +#define _gather_5_type 1163 +#define _loop0_8_type 1164 +#define _gather_7_type 1165 +#define _loop0_10_type 1166 +#define _gather_9_type 1167 +#define _loop1_11_type 1168 +#define _loop0_13_type 1169 +#define _gather_12_type 1170 +#define _tmp_14_type 1171 +#define _tmp_15_type 1172 
+#define _tmp_16_type 1173 +#define _tmp_17_type 1174 +#define _tmp_18_type 1175 +#define _tmp_19_type 1176 +#define _tmp_20_type 1177 +#define _tmp_21_type 1178 +#define _loop1_22_type 1179 +#define _tmp_23_type 1180 +#define _tmp_24_type 1181 +#define _loop0_26_type 1182 +#define _gather_25_type 1183 +#define _loop0_28_type 1184 +#define _gather_27_type 1185 +#define _tmp_29_type 1186 +#define _loop0_30_type 1187 +#define _loop1_31_type 1188 +#define _loop0_33_type 1189 +#define _gather_32_type 1190 +#define _tmp_34_type 1191 +#define _loop0_36_type 1192 +#define _gather_35_type 1193 +#define _tmp_37_type 1194 +#define _loop0_39_type 1195 +#define _gather_38_type 1196 +#define _loop0_41_type 1197 +#define _gather_40_type 1198 +#define _loop0_43_type 1199 +#define _gather_42_type 1200 +#define _loop0_45_type 1201 +#define _gather_44_type 1202 +#define _tmp_46_type 1203 +#define _loop1_47_type 1204 +#define _tmp_48_type 1205 +#define _tmp_49_type 1206 +#define _tmp_50_type 1207 +#define _tmp_51_type 1208 +#define _tmp_52_type 1209 +#define _loop0_53_type 1210 +#define _loop0_54_type 1211 +#define _loop0_55_type 1212 +#define _loop1_56_type 1213 +#define _loop0_57_type 1214 +#define _loop1_58_type 1215 +#define _loop1_59_type 1216 +#define _loop1_60_type 1217 +#define _loop0_61_type 1218 +#define _loop1_62_type 1219 +#define _loop0_63_type 1220 +#define _loop1_64_type 1221 +#define _loop0_65_type 1222 +#define _loop1_66_type 1223 +#define _loop1_67_type 1224 +#define _tmp_68_type 1225 +#define _loop0_70_type 1226 +#define _gather_69_type 1227 +#define _loop1_71_type 1228 +#define _loop0_73_type 1229 +#define _gather_72_type 1230 +#define _loop1_74_type 1231 +#define _loop0_75_type 1232 +#define _loop0_76_type 1233 +#define _loop0_77_type 1234 +#define _loop1_78_type 1235 +#define _loop0_79_type 1236 +#define _loop1_80_type 1237 +#define _loop1_81_type 1238 +#define _loop1_82_type 1239 +#define _loop0_83_type 1240 +#define _loop1_84_type 1241 +#define _loop0_85_type 1242 +#define _loop1_86_type 1243 +#define _loop0_87_type 1244 +#define _loop1_88_type 1245 +#define _loop1_89_type 1246 +#define _loop1_90_type 1247 +#define _loop1_91_type 1248 +#define _tmp_92_type 1249 +#define _loop0_94_type 1250 +#define _gather_93_type 1251 +#define _tmp_95_type 1252 +#define _tmp_96_type 1253 +#define _tmp_97_type 1254 +#define _tmp_98_type 1255 +#define _loop1_99_type 1256 +#define _tmp_100_type 1257 +#define _tmp_101_type 1258 +#define _loop0_103_type 1259 +#define _gather_102_type 1260 +#define _loop1_104_type 1261 +#define _loop0_105_type 1262 +#define _loop0_106_type 1263 +#define _tmp_107_type 1264 +#define _tmp_108_type 1265 +#define _loop0_110_type 1266 +#define _gather_109_type 1267 +#define _loop0_112_type 1268 +#define _gather_111_type 1269 +#define _loop0_114_type 1270 +#define _gather_113_type 1271 +#define _loop0_116_type 1272 +#define _gather_115_type 1273 +#define _loop0_117_type 1274 +#define _loop0_119_type 1275 +#define _gather_118_type 1276 +#define _tmp_120_type 1277 +#define _loop0_122_type 1278 +#define _gather_121_type 1279 +#define _loop0_124_type 1280 +#define _gather_123_type 1281 +#define _tmp_125_type 1282 +#define _loop0_126_type 1283 +#define _tmp_127_type 1284 +#define _loop0_128_type 1285 +#define _loop0_129_type 1286 +#define _tmp_130_type 1287 +#define _tmp_131_type 1288 +#define _loop0_132_type 1289 +#define _tmp_133_type 1290 +#define _loop0_134_type 1291 +#define _tmp_135_type 1292 +#define _tmp_136_type 1293 +#define _tmp_137_type 1294 +#define _tmp_138_type 1295 
+#define _tmp_139_type 1296 +#define _tmp_140_type 1297 +#define _tmp_141_type 1298 +#define _tmp_142_type 1299 +#define _tmp_143_type 1300 +#define _tmp_144_type 1301 +#define _tmp_145_type 1302 +#define _tmp_146_type 1303 +#define _tmp_147_type 1304 +#define _tmp_148_type 1305 +#define _tmp_149_type 1306 +#define _tmp_150_type 1307 +#define _loop1_151_type 1308 +#define _loop1_152_type 1309 +#define _tmp_153_type 1310 +#define _tmp_154_type 1311 + +static mod_ty file_rule(Parser *p); +static mod_ty interactive_rule(Parser *p); +static mod_ty eval_rule(Parser *p); +static mod_ty func_type_rule(Parser *p); +static expr_ty fstring_rule(Parser *p); +static asdl_seq* type_expressions_rule(Parser *p); +static asdl_seq* statements_rule(Parser *p); +static asdl_seq* statement_rule(Parser *p); +static asdl_seq* statement_newline_rule(Parser *p); +static asdl_seq* simple_stmt_rule(Parser *p); +static stmt_ty small_stmt_rule(Parser *p); +static stmt_ty compound_stmt_rule(Parser *p); +static stmt_ty assignment_rule(Parser *p); +static AugOperator* augassign_rule(Parser *p); +static stmt_ty global_stmt_rule(Parser *p); +static stmt_ty nonlocal_stmt_rule(Parser *p); +static stmt_ty yield_stmt_rule(Parser *p); +static stmt_ty assert_stmt_rule(Parser *p); +static stmt_ty del_stmt_rule(Parser *p); +static stmt_ty import_stmt_rule(Parser *p); +static stmt_ty import_name_rule(Parser *p); +static stmt_ty import_from_rule(Parser *p); +static asdl_seq* import_from_targets_rule(Parser *p); +static asdl_seq* import_from_as_names_rule(Parser *p); +static alias_ty import_from_as_name_rule(Parser *p); +static asdl_seq* dotted_as_names_rule(Parser *p); +static alias_ty dotted_as_name_rule(Parser *p); +static expr_ty dotted_name_rule(Parser *p); +static stmt_ty if_stmt_rule(Parser *p); +static stmt_ty elif_stmt_rule(Parser *p); +static asdl_seq* else_block_rule(Parser *p); +static stmt_ty while_stmt_rule(Parser *p); +static stmt_ty for_stmt_rule(Parser *p); +static stmt_ty with_stmt_rule(Parser *p); +static withitem_ty with_item_rule(Parser *p); +static stmt_ty try_stmt_rule(Parser *p); +static excepthandler_ty except_block_rule(Parser *p); +static asdl_seq* finally_block_rule(Parser *p); +static stmt_ty return_stmt_rule(Parser *p); +static stmt_ty raise_stmt_rule(Parser *p); +static stmt_ty function_def_rule(Parser *p); +static stmt_ty function_def_raw_rule(Parser *p); +static Token* func_type_comment_rule(Parser *p); +static arguments_ty params_rule(Parser *p); +static arguments_ty parameters_rule(Parser *p); +static asdl_seq* slash_no_default_rule(Parser *p); +static SlashWithDefault* slash_with_default_rule(Parser *p); +static StarEtc* star_etc_rule(Parser *p); +static arg_ty kwds_rule(Parser *p); +static arg_ty param_no_default_rule(Parser *p); +static NameDefaultPair* param_with_default_rule(Parser *p); +static NameDefaultPair* param_maybe_default_rule(Parser *p); +static arg_ty param_rule(Parser *p); +static expr_ty annotation_rule(Parser *p); +static expr_ty default_rule(Parser *p); +static asdl_seq* decorators_rule(Parser *p); +static stmt_ty class_def_rule(Parser *p); +static stmt_ty class_def_raw_rule(Parser *p); +static asdl_seq* block_rule(Parser *p); +static asdl_seq* expressions_list_rule(Parser *p); +static expr_ty star_expressions_rule(Parser *p); +static expr_ty star_expression_rule(Parser *p); +static asdl_seq* star_named_expressions_rule(Parser *p); +static expr_ty star_named_expression_rule(Parser *p); +static expr_ty named_expression_rule(Parser *p); +static expr_ty annotated_rhs_rule(Parser 
*p); +static expr_ty expressions_rule(Parser *p); +static expr_ty expression_rule(Parser *p); +static expr_ty lambdef_rule(Parser *p); +static arguments_ty lambda_params_rule(Parser *p); +static arguments_ty lambda_parameters_rule(Parser *p); +static asdl_seq* lambda_slash_no_default_rule(Parser *p); +static SlashWithDefault* lambda_slash_with_default_rule(Parser *p); +static StarEtc* lambda_star_etc_rule(Parser *p); +static arg_ty lambda_kwds_rule(Parser *p); +static arg_ty lambda_param_no_default_rule(Parser *p); +static NameDefaultPair* lambda_param_with_default_rule(Parser *p); +static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p); +static arg_ty lambda_param_rule(Parser *p); +static expr_ty disjunction_rule(Parser *p); +static expr_ty conjunction_rule(Parser *p); +static expr_ty inversion_rule(Parser *p); +static expr_ty comparison_rule(Parser *p); +static CmpopExprPair* compare_op_bitwise_or_pair_rule(Parser *p); +static CmpopExprPair* eq_bitwise_or_rule(Parser *p); +static CmpopExprPair* noteq_bitwise_or_rule(Parser *p); +static CmpopExprPair* lte_bitwise_or_rule(Parser *p); +static CmpopExprPair* lt_bitwise_or_rule(Parser *p); +static CmpopExprPair* gte_bitwise_or_rule(Parser *p); +static CmpopExprPair* gt_bitwise_or_rule(Parser *p); +static CmpopExprPair* notin_bitwise_or_rule(Parser *p); +static CmpopExprPair* in_bitwise_or_rule(Parser *p); +static CmpopExprPair* isnot_bitwise_or_rule(Parser *p); +static CmpopExprPair* is_bitwise_or_rule(Parser *p); +static expr_ty bitwise_or_rule(Parser *p); +static expr_ty bitwise_xor_rule(Parser *p); +static expr_ty bitwise_and_rule(Parser *p); +static expr_ty shift_expr_rule(Parser *p); +static expr_ty sum_rule(Parser *p); +static expr_ty term_rule(Parser *p); +static expr_ty factor_rule(Parser *p); +static expr_ty power_rule(Parser *p); +static expr_ty await_primary_rule(Parser *p); +static expr_ty primary_rule(Parser *p); +static expr_ty slices_rule(Parser *p); +static expr_ty slice_rule(Parser *p); +static expr_ty atom_rule(Parser *p); +static expr_ty strings_rule(Parser *p); +static expr_ty list_rule(Parser *p); +static expr_ty listcomp_rule(Parser *p); +static expr_ty tuple_rule(Parser *p); +static expr_ty group_rule(Parser *p); +static expr_ty genexp_rule(Parser *p); +static expr_ty set_rule(Parser *p); +static expr_ty setcomp_rule(Parser *p); +static expr_ty dict_rule(Parser *p); +static expr_ty dictcomp_rule(Parser *p); +static asdl_seq* double_starred_kvpairs_rule(Parser *p); +static KeyValuePair* double_starred_kvpair_rule(Parser *p); +static KeyValuePair* kvpair_rule(Parser *p); +static asdl_seq* for_if_clauses_rule(Parser *p); +static comprehension_ty for_if_clause_rule(Parser *p); +static expr_ty yield_expr_rule(Parser *p); +static expr_ty arguments_rule(Parser *p); +static expr_ty args_rule(Parser *p); +static asdl_seq* kwargs_rule(Parser *p); +static expr_ty starred_expression_rule(Parser *p); +static KeywordOrStarred* kwarg_or_starred_rule(Parser *p); +static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p); +static expr_ty star_targets_rule(Parser *p); +static asdl_seq* star_targets_seq_rule(Parser *p); +static expr_ty star_target_rule(Parser *p); +static expr_ty star_atom_rule(Parser *p); +static expr_ty single_target_rule(Parser *p); +static expr_ty single_subscript_attribute_target_rule(Parser *p); +static asdl_seq* del_targets_rule(Parser *p); +static expr_ty del_target_rule(Parser *p); +static expr_ty del_t_atom_rule(Parser *p); +static void *del_target_end_rule(Parser *p); +static asdl_seq* 
targets_rule(Parser *p); +static expr_ty target_rule(Parser *p); +static expr_ty t_primary_rule(Parser *p); +static void *t_lookahead_rule(Parser *p); +static expr_ty t_atom_rule(Parser *p); +static void *incorrect_arguments_rule(Parser *p); +static void *invalid_kwarg_rule(Parser *p); +static void *invalid_named_expression_rule(Parser *p); +static void *invalid_assignment_rule(Parser *p); +static void *invalid_block_rule(Parser *p); +static void *invalid_comprehension_rule(Parser *p); +static void *invalid_dict_comprehension_rule(Parser *p); +static void *invalid_parameters_rule(Parser *p); +static void *invalid_lambda_parameters_rule(Parser *p); +static void *invalid_star_etc_rule(Parser *p); +static void *invalid_lambda_star_etc_rule(Parser *p); +static void *invalid_double_type_comments_rule(Parser *p); +static void *invalid_del_target_rule(Parser *p); +static void *invalid_import_from_targets_rule(Parser *p); +static asdl_seq *_loop0_1_rule(Parser *p); +static asdl_seq *_loop0_2_rule(Parser *p); +static asdl_seq *_loop0_4_rule(Parser *p); +static asdl_seq *_gather_3_rule(Parser *p); +static asdl_seq *_loop0_6_rule(Parser *p); +static asdl_seq *_gather_5_rule(Parser *p); +static asdl_seq *_loop0_8_rule(Parser *p); +static asdl_seq *_gather_7_rule(Parser *p); +static asdl_seq *_loop0_10_rule(Parser *p); +static asdl_seq *_gather_9_rule(Parser *p); +static asdl_seq *_loop1_11_rule(Parser *p); +static asdl_seq *_loop0_13_rule(Parser *p); +static asdl_seq *_gather_12_rule(Parser *p); +static void *_tmp_14_rule(Parser *p); +static void *_tmp_15_rule(Parser *p); +static void *_tmp_16_rule(Parser *p); +static void *_tmp_17_rule(Parser *p); +static void *_tmp_18_rule(Parser *p); +static void *_tmp_19_rule(Parser *p); +static void *_tmp_20_rule(Parser *p); +static void *_tmp_21_rule(Parser *p); +static asdl_seq *_loop1_22_rule(Parser *p); +static void *_tmp_23_rule(Parser *p); +static void *_tmp_24_rule(Parser *p); +static asdl_seq *_loop0_26_rule(Parser *p); +static asdl_seq *_gather_25_rule(Parser *p); +static asdl_seq *_loop0_28_rule(Parser *p); +static asdl_seq *_gather_27_rule(Parser *p); +static void *_tmp_29_rule(Parser *p); +static asdl_seq *_loop0_30_rule(Parser *p); +static asdl_seq *_loop1_31_rule(Parser *p); +static asdl_seq *_loop0_33_rule(Parser *p); +static asdl_seq *_gather_32_rule(Parser *p); +static void *_tmp_34_rule(Parser *p); +static asdl_seq *_loop0_36_rule(Parser *p); +static asdl_seq *_gather_35_rule(Parser *p); +static void *_tmp_37_rule(Parser *p); +static asdl_seq *_loop0_39_rule(Parser *p); +static asdl_seq *_gather_38_rule(Parser *p); +static asdl_seq *_loop0_41_rule(Parser *p); +static asdl_seq *_gather_40_rule(Parser *p); +static asdl_seq *_loop0_43_rule(Parser *p); +static asdl_seq *_gather_42_rule(Parser *p); +static asdl_seq *_loop0_45_rule(Parser *p); +static asdl_seq *_gather_44_rule(Parser *p); +static void *_tmp_46_rule(Parser *p); +static asdl_seq *_loop1_47_rule(Parser *p); +static void *_tmp_48_rule(Parser *p); +static void *_tmp_49_rule(Parser *p); +static void *_tmp_50_rule(Parser *p); +static void *_tmp_51_rule(Parser *p); +static void *_tmp_52_rule(Parser *p); +static asdl_seq *_loop0_53_rule(Parser *p); +static asdl_seq *_loop0_54_rule(Parser *p); +static asdl_seq *_loop0_55_rule(Parser *p); +static asdl_seq *_loop1_56_rule(Parser *p); +static asdl_seq *_loop0_57_rule(Parser *p); +static asdl_seq *_loop1_58_rule(Parser *p); +static asdl_seq *_loop1_59_rule(Parser *p); +static asdl_seq *_loop1_60_rule(Parser *p); +static asdl_seq 
*_loop0_61_rule(Parser *p); +static asdl_seq *_loop1_62_rule(Parser *p); +static asdl_seq *_loop0_63_rule(Parser *p); +static asdl_seq *_loop1_64_rule(Parser *p); +static asdl_seq *_loop0_65_rule(Parser *p); +static asdl_seq *_loop1_66_rule(Parser *p); +static asdl_seq *_loop1_67_rule(Parser *p); +static void *_tmp_68_rule(Parser *p); +static asdl_seq *_loop0_70_rule(Parser *p); +static asdl_seq *_gather_69_rule(Parser *p); +static asdl_seq *_loop1_71_rule(Parser *p); +static asdl_seq *_loop0_73_rule(Parser *p); +static asdl_seq *_gather_72_rule(Parser *p); +static asdl_seq *_loop1_74_rule(Parser *p); +static asdl_seq *_loop0_75_rule(Parser *p); +static asdl_seq *_loop0_76_rule(Parser *p); +static asdl_seq *_loop0_77_rule(Parser *p); +static asdl_seq *_loop1_78_rule(Parser *p); +static asdl_seq *_loop0_79_rule(Parser *p); +static asdl_seq *_loop1_80_rule(Parser *p); +static asdl_seq *_loop1_81_rule(Parser *p); +static asdl_seq *_loop1_82_rule(Parser *p); +static asdl_seq *_loop0_83_rule(Parser *p); +static asdl_seq *_loop1_84_rule(Parser *p); +static asdl_seq *_loop0_85_rule(Parser *p); +static asdl_seq *_loop1_86_rule(Parser *p); +static asdl_seq *_loop0_87_rule(Parser *p); +static asdl_seq *_loop1_88_rule(Parser *p); +static asdl_seq *_loop1_89_rule(Parser *p); +static asdl_seq *_loop1_90_rule(Parser *p); +static asdl_seq *_loop1_91_rule(Parser *p); +static void *_tmp_92_rule(Parser *p); +static asdl_seq *_loop0_94_rule(Parser *p); +static asdl_seq *_gather_93_rule(Parser *p); +static void *_tmp_95_rule(Parser *p); +static void *_tmp_96_rule(Parser *p); +static void *_tmp_97_rule(Parser *p); +static void *_tmp_98_rule(Parser *p); +static asdl_seq *_loop1_99_rule(Parser *p); +static void *_tmp_100_rule(Parser *p); +static void *_tmp_101_rule(Parser *p); +static asdl_seq *_loop0_103_rule(Parser *p); +static asdl_seq *_gather_102_rule(Parser *p); +static asdl_seq *_loop1_104_rule(Parser *p); +static asdl_seq *_loop0_105_rule(Parser *p); +static asdl_seq *_loop0_106_rule(Parser *p); +static void *_tmp_107_rule(Parser *p); +static void *_tmp_108_rule(Parser *p); +static asdl_seq *_loop0_110_rule(Parser *p); +static asdl_seq *_gather_109_rule(Parser *p); +static asdl_seq *_loop0_112_rule(Parser *p); +static asdl_seq *_gather_111_rule(Parser *p); +static asdl_seq *_loop0_114_rule(Parser *p); +static asdl_seq *_gather_113_rule(Parser *p); +static asdl_seq *_loop0_116_rule(Parser *p); +static asdl_seq *_gather_115_rule(Parser *p); +static asdl_seq *_loop0_117_rule(Parser *p); +static asdl_seq *_loop0_119_rule(Parser *p); +static asdl_seq *_gather_118_rule(Parser *p); +static void *_tmp_120_rule(Parser *p); +static asdl_seq *_loop0_122_rule(Parser *p); +static asdl_seq *_gather_121_rule(Parser *p); +static asdl_seq *_loop0_124_rule(Parser *p); +static asdl_seq *_gather_123_rule(Parser *p); +static void *_tmp_125_rule(Parser *p); +static asdl_seq *_loop0_126_rule(Parser *p); +static void *_tmp_127_rule(Parser *p); +static asdl_seq *_loop0_128_rule(Parser *p); +static asdl_seq *_loop0_129_rule(Parser *p); +static void *_tmp_130_rule(Parser *p); +static void *_tmp_131_rule(Parser *p); +static asdl_seq *_loop0_132_rule(Parser *p); +static void *_tmp_133_rule(Parser *p); +static asdl_seq *_loop0_134_rule(Parser *p); +static void *_tmp_135_rule(Parser *p); +static void *_tmp_136_rule(Parser *p); +static void *_tmp_137_rule(Parser *p); +static void *_tmp_138_rule(Parser *p); +static void *_tmp_139_rule(Parser *p); +static void *_tmp_140_rule(Parser *p); +static void *_tmp_141_rule(Parser *p); +static 
void *_tmp_142_rule(Parser *p); +static void *_tmp_143_rule(Parser *p); +static void *_tmp_144_rule(Parser *p); +static void *_tmp_145_rule(Parser *p); +static void *_tmp_146_rule(Parser *p); +static void *_tmp_147_rule(Parser *p); +static void *_tmp_148_rule(Parser *p); +static void *_tmp_149_rule(Parser *p); +static void *_tmp_150_rule(Parser *p); +static asdl_seq *_loop1_151_rule(Parser *p); +static asdl_seq *_loop1_152_rule(Parser *p); +static void *_tmp_153_rule(Parser *p); +static void *_tmp_154_rule(Parser *p); + + +// file: statements? $ +static mod_ty +file_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + mod_ty _res = NULL; + int _mark = p->mark; + { // statements? $ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> file[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statements? $")); + void *a; + Token * endmarker_var; + if ( + (a = statements_rule(p), 1) // statements? + && + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + D(fprintf(stderr, "%*c+ file[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statements? $")); + _res = _PyPegen_make_module ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s file[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statements? $")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// interactive: statement_newline +static mod_ty +interactive_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + mod_ty _res = NULL; + int _mark = p->mark; + { // statement_newline + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> interactive[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement_newline")); + asdl_seq* a; + if ( + (a = statement_newline_rule(p)) // statement_newline + ) + { + D(fprintf(stderr, "%*c+ interactive[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement_newline")); + _res = Interactive ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s interactive[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "statement_newline")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// eval: expressions NEWLINE* $ +static mod_ty +eval_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + mod_ty _res = NULL; + int _mark = p->mark; + { // expressions NEWLINE* $ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> eval[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); + asdl_seq * _loop0_1_var; + expr_ty a; + Token * endmarker_var; + if ( + (a = expressions_rule(p)) // expressions + && + (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* + && + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + D(fprintf(stderr, "%*c+ eval[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); + _res = Expression ( a , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s eval[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions NEWLINE* $")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// func_type: '(' type_expressions? ')' '->' expression NEWLINE* $ +static mod_ty +func_type_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + mod_ty _res = NULL; + int _mark = p->mark; + { // '(' type_expressions? ')' '->' expression NEWLINE* $ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> func_type[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); + Token * _literal; + Token * _literal_1; + Token * _literal_2; + asdl_seq * _loop0_2_var; + void *a; + expr_ty b; + Token * endmarker_var; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = type_expressions_rule(p), 1) // type_expressions? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + (_literal_2 = _PyPegen_expect_token(p, 51)) // token='->' + && + (b = expression_rule(p)) // expression + && + (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* + && + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + D(fprintf(stderr, "%*c+ func_type[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); + _res = FunctionType ( a , b , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' type_expressions? 
')' '->' expression NEWLINE* $")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// fstring: star_expressions +static expr_ty +fstring_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// type_expressions: +// | ','.expression+ ',' '*' expression ',' '**' expression +// | ','.expression+ ',' '*' expression +// | ','.expression+ ',' '**' expression +// | '*' expression ',' '**' expression +// | '*' expression +// | '**' expression +// | ','.expression+ +static asdl_seq* +type_expressions_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.expression+ ',' '*' expression ',' '**' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); + Token * _literal; + Token * _literal_1; + Token * _literal_2; + Token * _literal_3; + asdl_seq * a; + expr_ty b; + expr_ty c; + if ( + (a = _gather_3_rule(p)) // ','.expression+ + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + && + (b = expression_rule(p)) // expression + && + (_literal_2 = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_3 = _PyPegen_expect_token(p, 35)) // token='**' + && + (c = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); + } + { // ','.expression+ ',' '*' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + expr_ty b; + if ( + (a = _gather_5_rule(p)) // ','.expression+ + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '*' expression")); + } + { // ','.expression+ ',' '**' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + expr_ty b; + if ( + (a = _gather_7_rule(p)) // ','.expression+ + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_1 = _PyPegen_expect_token(p, 35)) // token='**' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); + _res = _PyPegen_seq_append_to_end ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '**' expression")); + } + { // '*' expression ',' '**' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); + Token * _literal; + Token * _literal_1; + Token * _literal_2; + expr_ty a; + expr_ty b; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_2 = _PyPegen_expect_token(p, 35)) // token='**' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); + _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' expression ',' '**' expression")); + } + { // '*' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression")); + } + { // '**' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); + } + { // ','.expression+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+")); + asdl_seq * _gather_9_var; + if ( + (_gather_9_var = _gather_9_rule(p)) // ','.expression+ + ) + { + D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+")); + _res = _gather_9_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// statements: statement+ +static asdl_seq* +statements_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // statement+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+")); + asdl_seq * a; + if ( + (a = _loop1_11_rule(p)) // statement+ + ) + { + D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+")); + _res = _PyPegen_seq_flatten ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statements[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "statement+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// statement: compound_stmt | simple_stmt +static asdl_seq* +statement_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // compound_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt")); + stmt_ty a; + if ( + (a = compound_stmt_rule(p)) // compound_stmt + ) + { + D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt")); + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compound_stmt")); + } + { // simple_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + asdl_seq* simple_stmt_var; + if ( + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt + ) + { + D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + _res = simple_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// statement_newline: compound_stmt NEWLINE | simple_stmt | NEWLINE | $ +static asdl_seq* +statement_newline_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // compound_stmt NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); + stmt_ty a; + Token * newline_var; + if ( + (a = compound_stmt_rule(p)) // compound_stmt + && + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "compound_stmt NEWLINE")); + } + { // simple_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + asdl_seq* simple_stmt_var; + if ( + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt + ) + { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + _res = simple_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); + } + { // NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); + } + { // $ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "$")); + Token * endmarker_var; + if ( + (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' + ) + { + D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "$")); + _res = _PyPegen_interactive_exit ( p ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "$")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// simple_stmt: small_stmt !';' NEWLINE | ';'.small_stmt+ ';'? 
NEWLINE +static asdl_seq* +simple_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // small_stmt !';' NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); + stmt_ty a; + Token * newline_var; + if ( + (a = small_stmt_rule(p)) // small_stmt + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) // token=';' + && + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); + _res = _PyPegen_singleton_seq ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "small_stmt !';' NEWLINE")); + } + { // ';'.small_stmt+ ';'? NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + Token * newline_var; + if ( + (a = _gather_12_rule(p)) // ';'.small_stmt+ + && + (_opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? + && + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'.small_stmt+ ';'? 
NEWLINE")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// small_stmt: +// | assignment +// | star_expressions +// | &'return' return_stmt +// | &('import' | 'from') import_stmt +// | &'raise' raise_stmt +// | 'pass' +// | &'del' del_stmt +// | &'yield' yield_stmt +// | &'assert' assert_stmt +// | 'break' +// | 'continue' +// | &'global' global_stmt +// | &'nonlocal' nonlocal_stmt +static stmt_ty +small_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // assignment + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment")); + stmt_ty assignment_var; + if ( + (assignment_var = assignment_rule(p)) // assignment + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment")); + _res = assignment_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment")); + } + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty e; + if ( + (e = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( e , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + { // &'return' return_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); + stmt_ty return_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' + && + (return_stmt_var = return_stmt_rule(p)) // return_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); + _res = return_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'return' return_stmt")); + } + { // &('import' | 'from') import_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); + stmt_ty import_stmt_var; + if ( + _PyPegen_lookahead(1, _tmp_14_rule, p) + && + (import_stmt_var = import_stmt_rule(p)) // import_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); + _res = import_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('import' | 'from') import_stmt")); + } + { // &'raise' raise_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); + stmt_ty raise_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' + && + (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); + _res = raise_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'raise' raise_stmt")); + } + { // 'pass' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'pass'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'pass'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Pass ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'pass'")); + } + { // &'del' del_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); + stmt_ty del_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' + && + (del_stmt_var = del_stmt_rule(p)) // del_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); + _res = del_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'del' del_stmt")); + } + { // &'yield' yield_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); + stmt_ty yield_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' + && + (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); + _res = yield_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'yield' yield_stmt")); + } + { // &'assert' assert_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); + stmt_ty assert_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' + && + (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); + _res = assert_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'assert' assert_stmt")); + } + { // 'break' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'break'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 506)) // token='break' + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'break'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Break ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'break'")); + } + { // 'continue' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'continue'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'continue'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Continue ( EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'continue'")); + } + { // &'global' global_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); + stmt_ty global_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' + && + (global_stmt_var = global_stmt_rule(p)) // global_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); + _res = global_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'global' global_stmt")); + } + { // &'nonlocal' nonlocal_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); + stmt_ty nonlocal_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' + && + (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt + ) + { + D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); + _res = nonlocal_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'nonlocal' nonlocal_stmt")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); + D(p->level--); + return _res; +} + +// compound_stmt: +// | &('def' | '@' | ASYNC) function_def +// | &'if' if_stmt +// | &('class' | '@') class_def +// | &('with' | ASYNC) with_stmt +// | &('for' | ASYNC) for_stmt +// | &'try' try_stmt +// | &'while' while_stmt +static stmt_ty +compound_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + { // &('def' | '@' | ASYNC) function_def + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + stmt_ty function_def_var; + if ( + _PyPegen_lookahead(1, _tmp_15_rule, p) + && + (function_def_var = function_def_rule(p)) // function_def + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + _res = function_def_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); + } + { // &'if' if_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); + stmt_ty if_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' + && + (if_stmt_var = if_stmt_rule(p)) // if_stmt + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); + _res = if_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'if' if_stmt")); + } + { // &('class' | '@') class_def + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); + stmt_ty class_def_var; + if ( + _PyPegen_lookahead(1, _tmp_16_rule, p) + && + (class_def_var = class_def_rule(p)) // class_def + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); + _res = class_def_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('class' | '@') class_def")); + } + { // &('with' | ASYNC) with_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); + stmt_ty with_stmt_var; + if ( + _PyPegen_lookahead(1, _tmp_17_rule, p) + && + (with_stmt_var = with_stmt_rule(p)) // with_stmt + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); + _res = with_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('with' | ASYNC) with_stmt")); + } + { // &('for' | ASYNC) for_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); + stmt_ty for_stmt_var; + if ( + _PyPegen_lookahead(1, _tmp_18_rule, p) + && + (for_stmt_var = for_stmt_rule(p)) // for_stmt + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); + _res = for_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | ASYNC) for_stmt")); + } + { // &'try' try_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); + stmt_ty try_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' + && + (try_stmt_var = try_stmt_rule(p)) // try_stmt + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); + _res = try_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'try' try_stmt")); + } + { // &'while' while_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); + stmt_ty while_stmt_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' + && + (while_stmt_var = while_stmt_rule(p)) // while_stmt + ) + { + D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); + _res = while_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'while' while_stmt")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// assignment: +// | NAME ':' expression ['=' annotated_rhs] +// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] +// | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? +// | single_target augassign (yield_expr | star_expressions) +// | invalid_assignment +static stmt_ty +assignment_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); + Token * _literal; + expr_ty a; + expr_ty b; + void *c; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + && + (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] + ) + { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); + } + { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); + Token * _literal; + void *a; + expr_ty b; + void *c; + if ( + (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + && + (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] + ) + { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); + } + { // ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); + asdl_seq * a; + void *b; + void *tc; + if ( + (a = _loop1_22_rule(p)) // ((star_targets '='))+ + && + (b = _tmp_23_rule(p)) // yield_expr | star_expressions + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + ) + { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); + } + { // single_target augassign (yield_expr | star_expressions) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + expr_ty a; + AugOperator* b; + void *c; + if ( + (a = single_target_rule(p)) // single_target + && + (b = augassign_rule(p)) // augassign + && + (c = _tmp_24_rule(p)) // yield_expr | star_expressions + ) + { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + } + { // invalid_assignment + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); + void *invalid_assignment_var; + if ( + (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment + ) + { + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); + _res = invalid_assignment_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_assignment")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// augassign: +// | '+=' +// | '-=' +// | '*=' +// | '@=' +// | '/=' +// | '%=' +// | '&=' +// | '|=' +// | '^=' +// | '<<=' +// | '>>=' +// | '**=' +// | '//=' +static AugOperator* +augassign_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + AugOperator* _res = NULL; + int _mark = p->mark; + { // '+=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 36)) // token='+=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+='")); + _res = _PyPegen_augoperator ( p , Add ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'+='")); + } + { // '-=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 37)) // token='-=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-='")); + _res = _PyPegen_augoperator ( p , Sub ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-='")); + } + { // '*=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 38)) // token='*=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*='")); + _res = _PyPegen_augoperator ( p , Mult ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*='")); + } + { // '@=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 50)) // token='@=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@='")); + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@='")); + } + { // '/=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 39)) // token='/=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/='")); + _res = _PyPegen_augoperator ( p , Div ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/='")); + } + { // '%=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 40)) // token='%=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%='")); + _res = _PyPegen_augoperator ( p , Mod ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'%='")); + } + { // '&=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'&='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 41)) // token='&=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'&='")); + _res = _PyPegen_augoperator ( p , BitAnd ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'&='")); + } + { // '|=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 42)) // token='|=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|='")); + _res = _PyPegen_augoperator ( p , BitOr ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|='")); + } + { // '^=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'^='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 43)) // token='^=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'^='")); + _res = _PyPegen_augoperator ( p , BitXor ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'^='")); + } + { // '<<=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<<='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<<='")); + _res = _PyPegen_augoperator ( p , LShift ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<<='")); + } + { // '>>=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>>='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>>='")); + _res = _PyPegen_augoperator ( p , RShift ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>>='")); + } + { // '**=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 46)) // token='**=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**='")); + _res = _PyPegen_augoperator ( p , Pow ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**='")); + } + { // '//=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//='")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 48)) // token='//=' + ) + { + D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//='")); + _res = _PyPegen_augoperator ( p , FloorDiv ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'//='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// global_stmt: 'global' ','.NAME+ +static stmt_ty +global_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'global' ','.NAME+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> global_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); + Token * _keyword; + asdl_seq * a; + if ( + (_keyword = _PyPegen_expect_token(p, 508)) // token='global' + && + (a = _gather_25_rule(p)) // ','.NAME+ + ) + { + D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s global_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'global' ','.NAME+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// nonlocal_stmt: 'nonlocal' ','.NAME+ +static stmt_ty +nonlocal_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'nonlocal' ','.NAME+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> nonlocal_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); + Token * _keyword; + asdl_seq * a; + if ( + (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' + && + (a = _gather_27_rule(p)) // ','.NAME+ + ) + { + D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s nonlocal_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'nonlocal' ','.NAME+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// yield_stmt: yield_expr +static stmt_ty +yield_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> yield_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty y; + if ( + (y = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ yield_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Expr ( y , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// assert_stmt: 'assert' expression [',' expression] +static stmt_ty +assert_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'assert' expression [',' expression] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> assert_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); + Token * _keyword; + expr_ty a; + void *b; + if ( + (_keyword = _PyPegen_expect_token(p, 505)) // token='assert' + && + (a = expression_rule(p)) // expression + && + (b = _tmp_29_rule(p), 1) // [',' expression] + ) + { + D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Assert ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s assert_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'assert' expression [',' expression]")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// del_stmt: 'del' del_targets +static stmt_ty +del_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'del' del_targets + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); + Token * _keyword; + asdl_seq* a; + if ( + (_keyword = _PyPegen_expect_token(p, 503)) // token='del' + && + (a = del_targets_rule(p)) // del_targets + ) + { + D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Delete ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'del' del_targets")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_stmt: import_name | import_from +static stmt_ty +import_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + { // import_name + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_name")); + stmt_ty import_name_var; + if ( + (import_name_var = import_name_rule(p)) // import_name + ) + { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_name")); + _res = import_name_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_name")); + } + { // import_from + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from")); + stmt_ty import_from_var; + if ( + (import_from_var = import_from_rule(p)) // import_from + ) + { + D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from")); + _res = import_from_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_name: 'import' dotted_as_names +static stmt_ty +import_name_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'import' dotted_as_names + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); + Token * _keyword; + asdl_seq* a; + if ( + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' + && + (a = dotted_as_names_rule(p)) // dotted_as_names + ) + { + D(fprintf(stderr, "%*c+ import_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Import ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import' dotted_as_names")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_from: +// | 'from' (('.' | '...'))* dotted_name 'import' import_from_targets +// | 'from' (('.' | '...'))+ 'import' import_from_targets +static stmt_ty +import_from_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); + Token * _keyword; + Token * _keyword_1; + asdl_seq * a; + expr_ty b; + asdl_seq* c; + if ( + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' + && + (a = _loop0_30_rule(p)) // (('.' | '...'))* + && + (b = dotted_name_rule(p)) // dotted_name + && + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + && + (c = import_from_targets_rule(p)) // import_from_targets + ) + { + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' 
| '...'))* dotted_name 'import' import_from_targets")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); + } + { // 'from' (('.' | '...'))+ 'import' import_from_targets + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + Token * _keyword; + Token * _keyword_1; + asdl_seq * a; + asdl_seq* b; + if ( + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' + && + (a = _loop1_31_rule(p)) // (('.' | '...'))+ + && + (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' + && + (b = import_from_targets_rule(p)) // import_from_targets + ) + { + D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_from_targets: +// | '(' import_from_as_names ','? ')' +// | import_from_as_names !',' +// | '*' +// | invalid_import_from_targets +static asdl_seq* +import_from_targets_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // '(' import_from_as_names ','? ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? ')'")); + Token * _literal; + Token * _literal_1; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq* a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = import_from_as_names_rule(p)) // import_from_as_names + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? 
')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' import_from_as_names ','? ')'")); + } + { // import_from_as_names !',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); + asdl_seq* import_from_as_names_var; + if ( + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' + ) + { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); + _res = import_from_as_names_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_names !','")); + } + { // '*' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + ) + { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); + _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); + } + { // invalid_import_from_targets + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); + void *invalid_import_from_targets_var; + if ( + (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets + ) + { + D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); + _res = invalid_import_from_targets_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_import_from_targets")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_from_as_names: ','.import_from_as_name+ +static asdl_seq* +import_from_as_names_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.import_from_as_name+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); + asdl_seq * a; + if ( + (a = _gather_32_rule(p)) // ','.import_from_as_name+ + ) + { + D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_as_names[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.import_from_as_name+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// import_from_as_name: NAME ['as' NAME] +static alias_ty +import_from_as_name_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + alias_ty _res = NULL; + int _mark = p->mark; + { // NAME ['as' NAME] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> import_from_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); + expr_ty a; + void *b; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (b = _tmp_34_rule(p), 1) // ['as' NAME] + ) + { + D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s import_from_as_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ['as' NAME]")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// dotted_as_names: ','.dotted_as_name+ +static asdl_seq* +dotted_as_names_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.dotted_as_name+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); + asdl_seq * a; + if ( + (a = _gather_35_rule(p)) // ','.dotted_as_name+ + ) + { + D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_as_names[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.dotted_as_name+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// dotted_as_name: dotted_name ['as' NAME] +static alias_ty +dotted_as_name_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + alias_ty _res = NULL; + int _mark = p->mark; + { // dotted_name ['as' NAME] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dotted_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); + expr_ty a; + void *b; + if ( + (a = dotted_name_rule(p)) // dotted_name + && + (b = _tmp_37_rule(p), 1) // ['as' NAME] + ) + { + D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); + _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_as_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name ['as' NAME]")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// dotted_name: dotted_name '.' NAME | NAME +static expr_ty dotted_name_raw(Parser *); +static expr_ty +dotted_name_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); + if (tmpvar_0) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = dotted_name_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +dotted_name_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // dotted_name '.' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = dotted_name_rule(p)) // dotted_name + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); + _res = _PyPegen_join_names_with_dot ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name '.' NAME")); + } + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty name_var; + if ( + (name_var = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + _res = name_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// if_stmt: +// | 'if' named_expression ':' block elif_stmt +// | 'if' named_expression ':' block else_block? +static stmt_ty +if_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'if' named_expression ':' block elif_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); + Token * _keyword; + Token * _literal; + expr_ty a; + asdl_seq* b; + stmt_ty c; + if ( + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = elif_stmt_rule(p)) // elif_stmt + ) + { + D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s if_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' named_expression ':' block elif_stmt")); + } + { // 'if' named_expression ':' block else_block? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); + Token * _keyword; + Token * _literal; + expr_ty a; + asdl_seq* b; + void *c; + if ( + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), 1) // else_block? + ) + { + D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s if_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' named_expression ':' block else_block?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// elif_stmt: +// | 'elif' named_expression ':' block elif_stmt +// | 'elif' named_expression ':' block else_block? +static stmt_ty +elif_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'elif' named_expression ':' block elif_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); + Token * _keyword; + Token * _literal; + expr_ty a; + asdl_seq* b; + stmt_ty c; + if ( + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = elif_stmt_rule(p)) // elif_stmt + ) + { + D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); + } + { // 'elif' named_expression ':' block else_block? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); + Token * _keyword; + Token * _literal; + expr_ty a; + asdl_seq* b; + void *c; + if ( + (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), 1) // else_block? 
+ ) + { + D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_If ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block else_block?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// else_block: 'else' ':' block +static asdl_seq* +else_block_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // 'else' ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> else_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); + Token * _keyword; + Token * _literal; + asdl_seq* b; + if ( + (_keyword = _PyPegen_expect_token(p, 516)) // token='else' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ else_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); + _res = b; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s else_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else' ':' block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// while_stmt: 'while' named_expression ':' block else_block? +static stmt_ty +while_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'while' named_expression ':' block else_block? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> while_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); + Token * _keyword; + Token * _literal; + expr_ty a; + asdl_seq* b; + void *c; + if ( + (_keyword = _PyPegen_expect_token(p, 512)) // token='while' + && + (a = named_expression_rule(p)) // named_expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (c = else_block_rule(p), 1) // else_block? 
+ ) + { + D(fprintf(stderr, "%*c+ while_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_While ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s while_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'while' named_expression ':' block else_block?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// for_stmt: +// | 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? +// | ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? +static stmt_ty +for_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + Token * _keyword; + Token * _keyword_1; + Token * _literal; + asdl_seq* b; + void *el; + expr_ty ex; + expr_ty t; + void *tc; + if ( + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + && + (t = star_targets_rule(p)) // star_targets + && + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + && + (ex = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + && + (el = else_block_rule(p), 1) // else_block? + ) + { + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + } + { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
+ if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + Token * _keyword; + Token * _keyword_1; + Token * _literal; + Token * async_var; + asdl_seq* b; + void *el; + expr_ty ex; + expr_ty t; + void *tc; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + && + (t = star_targets_rule(p)) // star_targets + && + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + && + (ex = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + && + (el = else_block_rule(p), 1) // else_block? + ) + { + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// with_stmt: +// | 'with' '(' ','.with_item+ ','? ')' ':' block +// | 'with' ','.with_item+ ':' TYPE_COMMENT? block +// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block +// | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block +static stmt_ty +with_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'with' '(' ','.with_item+ ','? ')' ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + asdl_seq* b; + if ( + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = _gather_38_rule(p)) // ','.with_item+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
+ && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); + } + { // 'with' ','.with_item+ ':' TYPE_COMMENT? block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); + Token * _keyword; + Token * _literal; + asdl_seq * a; + asdl_seq* b; + void *tc; + if ( + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' + && + (a = _gather_40_rule(p)) // ','.with_item+ + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); + } + { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + Token * async_var; + asdl_seq* b; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = _gather_42_rule(p)) // ','.with_item+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? 
')' ':' block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); + } + { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + Token * _keyword; + Token * _literal; + asdl_seq * a; + Token * async_var; + asdl_seq* b; + void *tc; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' + && + (a = _gather_44_rule(p)) // ','.with_item+ + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// with_item: expression ['as' target] +static withitem_ty +with_item_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + withitem_ty _res = NULL; + int _mark = p->mark; + { // expression ['as' target] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); + expr_ty e; + void *o; + if ( + (e = expression_rule(p)) // expression + && + (o = _tmp_46_rule(p), 1) // ['as' target] + ) + { + D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); + _res = _Py_withitem ( e , o , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' target]")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// try_stmt: +// | 'try' ':' block finally_block +// | 'try' ':' block except_block+ else_block? finally_block? +static stmt_ty +try_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'try' ':' block finally_block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); + Token * _keyword; + Token * _literal; + asdl_seq* b; + asdl_seq* f; + if ( + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (f = finally_block_rule(p)) // finally_block + ) + { + D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block finally_block")); + } + { // 'try' ':' block except_block+ else_block? finally_block? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); + Token * _keyword; + Token * _literal; + asdl_seq* b; + void *el; + asdl_seq * ex; + void *f; + if ( + (_keyword = _PyPegen_expect_token(p, 511)) // token='try' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + && + (ex = _loop1_47_rule(p)) // except_block+ + && + (el = else_block_rule(p), 1) // else_block? + && + (f = finally_block_rule(p), 1) // finally_block? + ) + { + D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Try ( b , ex , el , f , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block +static excepthandler_ty +except_block_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + excepthandler_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'except' expression ['as' NAME] ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); + Token * _keyword; + Token * _literal; + asdl_seq* b; + expr_ty e; + void *t; + if ( + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' + && + (e = expression_rule(p)) // expression + && + (t = _tmp_48_rule(p), 1) // ['as' NAME] + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ['as' NAME] ':' block")); + } + { // 'except' ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); + Token * _keyword; + Token * _literal; + asdl_seq* b; + if ( + (_keyword = _PyPegen_expect_token(p, 520)) // token='except' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except' ':' block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// finally_block: 'finally' ':' block +static asdl_seq* +finally_block_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // 'finally' ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> finally_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); + Token * _keyword; + Token * _literal; + asdl_seq* a; + if ( + (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (a = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ finally_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s finally_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally' ':' block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// return_stmt: 'return' star_expressions? +static stmt_ty +return_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'return' star_expressions? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> return_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); + Token * _keyword; + void *a; + if ( + (_keyword = _PyPegen_expect_token(p, 500)) // token='return' + && + (a = star_expressions_rule(p), 1) // star_expressions? + ) + { + D(fprintf(stderr, "%*c+ return_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Return ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s return_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'return' star_expressions?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// raise_stmt: 'raise' expression ['from' expression] | 'raise' +static stmt_ty +raise_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'raise' expression ['from' expression] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); + Token * _keyword; + expr_ty a; + void *b; + if ( + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' + && + (a = expression_rule(p)) // expression + && + (b = _tmp_49_rule(p), 1) // ['from' expression] + ) + { + D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression ['from' expression]")); + } + { // 'raise' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' + ) + { + D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Raise ( NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'raise'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// function_def: decorators function_def_raw | function_def_raw +static stmt_ty +function_def_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + { // decorators function_def_raw + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); + asdl_seq* d; + stmt_ty f; + if ( + (d = decorators_rule(p)) // decorators + && + (f = function_def_raw_rule(p)) // function_def_raw + ) + { + D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); + _res = _PyPegen_function_def_decorators ( p , d , f ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators function_def_raw")); + } + { // function_def_raw + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "function_def_raw")); + stmt_ty function_def_raw_var; + if ( + (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw + ) + { + D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "function_def_raw")); + _res = function_def_raw_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "function_def_raw")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// function_def_raw: +// | 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block +// | ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block +static stmt_ty +function_def_raw_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *a; + asdl_seq* b; + expr_ty n; + void *params; + void *tc; + if ( + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + && + (n = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (params = params_rule(p), 1) // params? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + (a = _tmp_50_rule(p), 1) // ['->' expression] + && + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = func_type_comment_rule(p), 1) // func_type_comment? 
+ && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); + } + { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); + Token * _keyword; + Token * _literal; + Token * _literal_1; + Token * _literal_2; + void *a; + Token * async_var; + asdl_seq* b; + expr_ty n; + void *params; + void *tc; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + && + (n = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (params = params_rule(p), 1) // params? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + (a = _tmp_51_rule(p), 1) // ['->' expression] + && + (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' + && + (tc = func_type_comment_rule(p), 1) // func_type_comment? + && + (b = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// func_type_comment: +// | NEWLINE TYPE_COMMENT &(NEWLINE INDENT) +// | invalid_double_type_comments +// | TYPE_COMMENT +static Token* +func_type_comment_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + Token* _res = NULL; + int _mark = p->mark; + { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); + Token * newline_var; + Token * t; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + _PyPegen_lookahead(1, _tmp_52_rule, p) + ) + { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); + _res = t; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); + } + { // invalid_double_type_comments + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); + void *invalid_double_type_comments_var; + if ( + (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments + ) + { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); + _res = invalid_double_type_comments_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_double_type_comments")); + } + { // TYPE_COMMENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); + Token * type_comment_var; + if ( + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + ) + { + D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); + _res = type_comment_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "TYPE_COMMENT")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// params: invalid_parameters | parameters +static arguments_ty +params_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arguments_ty _res = NULL; + int _mark = p->mark; + { // invalid_parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); + void *invalid_parameters_var; + if ( + (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters + ) + { + D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); + _res = invalid_parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_parameters")); + } + { // parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "parameters")); + arguments_ty parameters_var; + if ( + (parameters_var = parameters_rule(p)) // parameters + ) + { + D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "parameters")); + _res = parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "parameters")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// parameters: +// | slash_no_default param_no_default* param_with_default* star_etc? +// | slash_with_default param_with_default* star_etc? +// | param_no_default+ param_with_default* star_etc? +// | param_with_default+ star_etc? +// | star_etc +static arguments_ty +parameters_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arguments_ty _res = NULL; + int _mark = p->mark; + { // slash_no_default param_no_default* param_with_default* star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); + asdl_seq* a; + asdl_seq * b; + asdl_seq * c; + void *d; + if ( + (a = slash_no_default_rule(p)) // slash_no_default + && + (b = _loop0_53_rule(p)) // param_no_default* + && + (c = _loop0_54_rule(p)) // param_with_default* + && + (d = star_etc_rule(p), 1) // star_etc? + ) + { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); + } + { // slash_with_default param_with_default* star_etc? 
+ if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); + SlashWithDefault* a; + asdl_seq * b; + void *c; + if ( + (a = slash_with_default_rule(p)) // slash_with_default + && + (b = _loop0_55_rule(p)) // param_with_default* + && + (c = star_etc_rule(p), 1) // star_etc? + ) + { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default param_with_default* star_etc?")); + } + { // param_no_default+ param_with_default* star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); + asdl_seq * a; + asdl_seq * b; + void *c; + if ( + (a = _loop1_56_rule(p)) // param_no_default+ + && + (b = _loop0_57_rule(p)) // param_with_default* + && + (c = star_etc_rule(p), 1) // star_etc? + ) + { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); + } + { // param_with_default+ star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); + asdl_seq * a; + void *b; + if ( + (a = _loop1_58_rule(p)) // param_with_default+ + && + (b = star_etc_rule(p), 1) // star_etc? + ) + { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+ star_etc?")); + } + { // star_etc + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_etc")); + StarEtc* a; + if ( + (a = star_etc_rule(p)) // star_etc + ) + { + D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_etc")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_etc")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' +static asdl_seq* +slash_no_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // param_no_default+ '/' ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + if ( + (a = _loop1_59_rule(p)) // param_no_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ '/' ','")); + } + { // param_no_default+ '/' &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); + Token * _literal; + asdl_seq * a; + if ( + (a = _loop1_60_rule(p)) // param_no_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default+ '/' &')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// slash_with_default: +// | param_no_default* param_with_default+ '/' ',' +// | param_no_default* param_with_default+ '/' &')' +static SlashWithDefault* +slash_with_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + SlashWithDefault* _res = NULL; + int _mark = p->mark; + { // param_no_default* param_with_default+ '/' ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + asdl_seq * b; + if ( + (a = _loop0_61_rule(p)) // param_no_default* + && + (b = _loop1_62_rule(p)) // param_with_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); + } + { // param_no_default* param_with_default+ '/' &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); + Token * _literal; + asdl_seq * a; + asdl_seq * b; + if ( + (a = _loop0_63_rule(p)) // param_no_default* + && + (b = _loop1_64_rule(p)) // param_with_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_etc: +// | '*' param_no_default param_maybe_default* kwds? +// | '*' ',' param_maybe_default+ kwds? +// | kwds +// | invalid_star_etc +static StarEtc* +star_etc_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + StarEtc* _res = NULL; + int _mark = p->mark; + { // '*' param_no_default param_maybe_default* kwds? 
+ if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); + Token * _literal; + arg_ty a; + asdl_seq * b; + void *c; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = param_no_default_rule(p)) // param_no_default + && + (b = _loop0_65_rule(p)) // param_maybe_default* + && + (c = kwds_rule(p), 1) // kwds? + ) + { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); + } + { // '*' ',' param_maybe_default+ kwds? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); + Token * _literal; + Token * _literal_1; + asdl_seq * b; + void *c; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + && + (b = _loop1_66_rule(p)) // param_maybe_default+ + && + (c = kwds_rule(p), 1) // kwds? + ) + { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); + } + { // kwds + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwds")); + arg_ty a; + if ( + (a = kwds_rule(p)) // kwds + ) + { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwds")); + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwds")); + } + { // invalid_star_etc + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); + void *invalid_star_etc_var; + if ( + (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc + ) + { + D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); + _res = invalid_star_etc_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_star_etc")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// kwds: '**' param_no_default +static arg_ty +kwds_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + { // '**' param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); + Token * _literal; + arg_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (a = param_no_default_rule(p)) // param_no_default + ) + { + D(fprintf(stderr, "%*c+ kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwds[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' param_no_default")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' +static arg_ty +param_no_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + { // param ',' TYPE_COMMENT? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); + Token * _literal; + arg_ty a; + void *tc; + if ( + (a = param_rule(p)) // param + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + ) + { + D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param ',' TYPE_COMMENT?")); + } + { // param TYPE_COMMENT? &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); + arg_ty a; + void *tc; + if ( + (a = param_rule(p)) // param + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); + _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param TYPE_COMMENT? &')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? 
&')' +static NameDefaultPair* +param_with_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + NameDefaultPair* _res = NULL; + int _mark = p->mark; + { // param default ',' TYPE_COMMENT? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); + Token * _literal; + arg_ty a; + expr_ty c; + void *tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p)) // default + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + ) + { + D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default ',' TYPE_COMMENT?")); + } + { // param default TYPE_COMMENT? &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); + arg_ty a; + expr_ty c; + void *tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p)) // default + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default TYPE_COMMENT? &')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// param_maybe_default: +// | param default? ',' TYPE_COMMENT? +// | param default? TYPE_COMMENT? &')' +static NameDefaultPair* +param_maybe_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + NameDefaultPair* _res = NULL; + int _mark = p->mark; + { // param default? ',' TYPE_COMMENT? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? ',' TYPE_COMMENT?")); + Token * _literal; + arg_ty a; + void *c; + void *tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p), 1) // default? + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + ) + { + D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? 
',' TYPE_COMMENT?")); + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? ',' TYPE_COMMENT?")); + } + { // param default? TYPE_COMMENT? &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? &')'")); + arg_ty a; + void *c; + void *tc; + if ( + (a = param_rule(p)) // param + && + (c = default_rule(p), 1) // default? + && + (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? &')'")); + _res = _PyPegen_name_default_pair ( p , a , c , tc ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? TYPE_COMMENT? &')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// param: NAME annotation? +static arg_ty +param_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME annotation? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); + expr_ty a; + void *b; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (b = annotation_rule(p), 1) // annotation? + ) + { + D(fprintf(stderr, "%*c+ param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s param[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME annotation?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// annotation: ':' expression +static expr_ty +annotation_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // ':' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> annotation[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ annotation[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s annotation[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// default: '=' expression +static expr_ty +default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // '=' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' expression")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// decorators: (('@' named_expression NEWLINE))+ +static asdl_seq* +decorators_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // (('@' named_expression NEWLINE))+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); + asdl_seq * a; + if ( + (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ + ) + { + D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s decorators[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(('@' named_expression NEWLINE))+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// class_def: decorators class_def_raw | class_def_raw +static stmt_ty +class_def_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + { // decorators class_def_raw + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); + asdl_seq* a; + stmt_ty b; + if ( + (a = decorators_rule(p)) // decorators + && + (b = class_def_raw_rule(p)) // class_def_raw + ) + { + D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); + _res = _PyPegen_class_def_decorators ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators class_def_raw")); + } + { // class_def_raw + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "class_def_raw")); + stmt_ty class_def_raw_var; + if ( + (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw + ) + { + D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "class_def_raw")); + _res = class_def_raw_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "class_def_raw")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// class_def_raw: 'class' NAME ['(' arguments? ')'] ':' block +static stmt_ty +class_def_raw_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + stmt_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'class' NAME ['(' arguments? ')'] ':' block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> class_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); + Token * _keyword; + Token * _literal; + expr_ty a; + void *b; + asdl_seq* c; + if ( + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' + && + (a = _PyPegen_name_token(p)) // NAME + && + (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (c = block_rule(p)) // block + ) + { + D(fprintf(stderr, "%*c+ class_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? 
( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s class_def_raw[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// block: NEWLINE INDENT statements DEDENT | simple_stmt | invalid_block +static asdl_seq* +block_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + if (_PyPegen_is_memoized(p, block_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + { // NEWLINE INDENT statements DEDENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); + asdl_seq* a; + Token * dedent_var; + Token * indent_var; + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' + && + (a = statements_rule(p)) // statements + && + (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' + ) + { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT statements DEDENT")); + } + { // simple_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + asdl_seq* simple_stmt_var; + if ( + (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt + ) + { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); + _res = simple_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); + } + { // invalid_block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_block")); + void *invalid_block_var; + if ( + (invalid_block_var = invalid_block_rule(p)) // invalid_block + ) + { + D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_block")); + _res = invalid_block_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_block")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, block_type, _res); + D(p->level--); + return _res; +} + +// expressions_list: ','.star_expression+ ','? +static asdl_seq* +expressions_list_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.star_expression+ ','? 
+ if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expressions_list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_69_rule(p)) // ','.star_expression+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ expressions_list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions_list[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_expression+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_expressions: +// | star_expression ((',' star_expression))+ ','? +// | star_expression ',' +// | star_expression +static expr_ty +star_expressions_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // star_expression ((',' star_expression))+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + asdl_seq * b; + if ( + (a = star_expression_rule(p)) // star_expression + && + (b = _loop1_71_rule(p)) // ((',' star_expression))+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); + } + { // star_expression ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ','")); + Token * _literal; + expr_ty a; + if ( + (a = star_expression_rule(p)) // star_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ','")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression ','")); + } + { // star_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression")); + expr_ty star_expression_var; + if ( + (star_expression_var = star_expression_rule(p)) // star_expression + ) + { + D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression")); + _res = star_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_expression: '*' bitwise_or | expression +static expr_ty +star_expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_expression_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '*' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' bitwise_or")); + } + { // expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); + _res = expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, star_expression_type, _res); + D(p->level--); + return _res; +} + +// star_named_expressions: ','.star_named_expression+ ','? +static asdl_seq* +star_named_expressions_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.star_named_expression+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_named_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_72_rule(p)) // ','.star_named_expression+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
+ ) + { + D(fprintf(stderr, "%*c+ star_named_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_named_expression+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_named_expression: '*' bitwise_or | named_expression +static expr_ty +star_named_expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '*' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' bitwise_or")); + } + { // named_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); + expr_ty named_expression_var; + if ( + (named_expression_var = named_expression_rule(p)) // named_expression + ) + { + D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); + _res = named_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "named_expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression +static expr_ty +named_expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME ':=' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ':=' expression")); + } + { // expression !':=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' + ) + { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); + _res = expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); + } + { // invalid_named_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); + void *invalid_named_expression_var; + if ( + (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression + ) + { + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); + _res = invalid_named_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_named_expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// annotated_rhs: yield_expr | star_expressions +static expr_ty +annotated_rhs_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// expressions: expression ((',' expression))+ ','? | expression ',' | expression +static expr_ty +expressions_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // expression ((',' expression))+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + asdl_seq * b; + if ( + (a = expression_rule(p)) // expression + && + (b = _loop1_74_rule(p)) // ((',' expression))+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ((',' expression))+ ','?")); + } + { // expression ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ','")); + Token * _literal; + expr_ty a; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ','")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ','")); + } + { // expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); + expr_ty expression_var; + if ( + (expression_var = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); + _res = expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// expression: disjunction 'if' disjunction 'else' expression | disjunction | lambdef +static expr_ty +expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, expression_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // disjunction 'if' disjunction 'else' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + expr_ty b; + expr_ty c; + if ( + (a = disjunction_rule(p)) // disjunction + && + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' + && + (b = disjunction_rule(p)) // disjunction + && + (_keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' + && + (c = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int 
_end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_IfExp ( b , a , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); + } + { // disjunction + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction")); + expr_ty disjunction_var; + if ( + (disjunction_var = disjunction_rule(p)) // disjunction + ) + { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction")); + _res = disjunction_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction")); + } + { // lambdef + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambdef")); + expr_ty lambdef_var; + if ( + (lambdef_var = lambdef_rule(p)) // lambdef + ) + { + D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambdef")); + _res = lambdef_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambdef")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, expression_type, _res); + D(p->level--); + return _res; +} + +// lambdef: 'lambda' lambda_params? ':' expression +static expr_ty +lambdef_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'lambda' lambda_params? ':' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambdef[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); + Token * _keyword; + Token * _literal; + void *a; + expr_ty b; + if ( + (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' + && + (a = lambda_params_rule(p), 1) // lambda_params? + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ lambdef[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Lambda ( ( a ) ? 
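Heavily reused rules such as expression are also memoized: on entry `_PyPegen_is_memoized` checks whether this rule was already attempted at this token position, and the `done:` label records the outcome with `_PyPegen_insert_memo`, so repeated backtracking over the same span stays cheap. A small sketch of the idea with a toy table keyed by (position, rule id); the names and layout here are invented for illustration and are not the pegen memo API:

    #include <stdio.h>

    #define MAX_POS 128
    #define NUM_RULES 2
    enum { RULE_EXPR = 0, RULE_TERM = 1 };

    /* memo[pos][rule]: 0 = never tried here, -1 = remembered failure,
       otherwise the saved end position + 1. */
    static int memo[MAX_POS][NUM_RULES];

    static int is_memoized(int pos, int rule, int *end) {
        int v = memo[pos][rule];
        if (v == 0) return 0;            /* never tried at this position */
        *end = (v == -1) ? -1 : v - 1;   /* -1 signals a remembered failure */
        return 1;
    }

    static void insert_memo(int pos, int rule, int end) {
        memo[pos][rule] = (end < 0) ? -1 : end + 1;
    }

    /* Pretend "parsing" a rule: expensive the first time, free afterwards. */
    static int expr_rule(int pos) {
        int end;
        if (is_memoized(pos, RULE_EXPR, &end)) return end;   /* cache hit */
        end = pos + 3;                    /* stand-in for real parsing work */
        insert_memo(pos, RULE_EXPR, end);
        return end;
    }

    int main(void) {
        printf("first call:  %d\n", expr_rule(4));   /* computes and stores 7 */
        printf("second call: %d\n", expr_rule(4));   /* answered from the memo */
        return 0;
    }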
a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambdef[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? ':' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_params: invalid_lambda_parameters | lambda_parameters +static arguments_ty +lambda_params_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arguments_ty _res = NULL; + int _mark = p->mark; + { // invalid_lambda_parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); + void *invalid_lambda_parameters_var; + if ( + (invalid_lambda_parameters_var = invalid_lambda_parameters_rule(p)) // invalid_lambda_parameters + ) + { + D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); + _res = invalid_lambda_parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_lambda_parameters")); + } + { // lambda_parameters + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); + arguments_ty lambda_parameters_var; + if ( + (lambda_parameters_var = lambda_parameters_rule(p)) // lambda_parameters + ) + { + D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); + _res = lambda_parameters_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_parameters")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_parameters: +// | lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? +// | lambda_slash_with_default lambda_param_with_default* lambda_star_etc? +// | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? +// | lambda_param_with_default+ lambda_star_etc? +// | lambda_star_etc +static arguments_ty +lambda_parameters_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arguments_ty _res = NULL; + int _mark = p->mark; + { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); + asdl_seq* a; + asdl_seq * b; + asdl_seq * c; + void *d; + if ( + (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default + && + (b = _loop0_75_rule(p)) // lambda_param_no_default* + && + (c = _loop0_76_rule(p)) // lambda_param_with_default* + && + (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
+ ) + { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); + _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); + } + { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); + SlashWithDefault* a; + asdl_seq * b; + void *c; + if ( + (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + && + (b = _loop0_77_rule(p)) // lambda_param_with_default* + && + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? + ) + { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); + } + { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); + asdl_seq * a; + asdl_seq * b; + void *c; + if ( + (a = _loop1_78_rule(p)) // lambda_param_no_default+ + && + (b = _loop0_79_rule(p)) // lambda_param_with_default* + && + (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? + ) + { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); + } + { // lambda_param_with_default+ lambda_star_etc? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); + asdl_seq * a; + void *b; + if ( + (a = _loop1_80_rule(p)) // lambda_param_with_default+ + && + (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
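Optional items such as `lambda_star_etc?` rely on the C comma operator: `(d = lambda_star_etc_rule(p), 1)` stores whatever the sub-rule returned (possibly NULL) and then evaluates to 1, so the surrounding `&&` chain never fails just because an optional part was absent. A tiny standalone illustration of the idiom (toy functions only):

    #include <stdio.h>

    /* A sub-rule that may or may not produce a value; NULL means "absent". */
    static const char *maybe_suffix(int present) {
        return present ? "suffix" : NULL;
    }

    int main(void) {
        const char *required = "base";
        const char *optional;

        /* (optional = maybe_suffix(0), 1) always yields 1, so the && chain
           continues whether or not the optional part matched. */
        if (
            (required != NULL)                 /* mandatory part             */
            &&
            (optional = maybe_suffix(0), 1)    /* optional part: never fails */
        )
        {
            printf("required=%s optional=%s\n",
                   required, optional ? optional : "(none)");
        }
        return 0;
    }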
+ ) + { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); + } + { // lambda_star_etc + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); + StarEtc* a; + if ( + (a = lambda_star_etc_rule(p)) // lambda_star_etc + ) + { + D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); + _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_star_etc")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_slash_no_default: +// | lambda_param_no_default+ '/' ',' +// | lambda_param_no_default+ '/' &':' +static asdl_seq* +lambda_slash_no_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // lambda_param_no_default+ '/' ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + if ( + (a = _loop1_81_rule(p)) // lambda_param_no_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default+ '/' ','")); + } + { // lambda_param_no_default+ '/' &':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); + Token * _literal; + asdl_seq * a; + if ( + (a = _loop1_82_rule(p)) // lambda_param_no_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' + ) + { + D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ '/' &':'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_slash_with_default: +// | lambda_param_no_default* lambda_param_with_default+ '/' ',' +// | lambda_param_no_default* lambda_param_with_default+ '/' &':' +static SlashWithDefault* +lambda_slash_with_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + SlashWithDefault* _res = NULL; + int _mark = p->mark; + { // lambda_param_no_default* lambda_param_with_default+ '/' ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); + Token * _literal; + Token * _literal_1; + asdl_seq * a; + asdl_seq * b; + if ( + (a = _loop0_83_rule(p)) // lambda_param_no_default* + && + (b = _loop1_84_rule(p)) // lambda_param_with_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); + } + { // lambda_param_no_default* lambda_param_with_default+ '/' &':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); + Token * _literal; + asdl_seq * a; + asdl_seq * b; + if ( + (a = _loop0_85_rule(p)) // lambda_param_no_default* + && + (b = _loop1_86_rule(p)) // lambda_param_with_default+ + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' + ) + { + D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); + _res = _PyPegen_slash_with_default ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_star_etc: +// | '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? +// | '*' ',' lambda_param_maybe_default+ lambda_kwds? +// | lambda_kwds +// | invalid_lambda_star_etc +static StarEtc* +lambda_star_etc_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + StarEtc* _res = NULL; + int _mark = p->mark; + { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); + Token * _literal; + arg_ty a; + asdl_seq * b; + void *c; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default + && + (b = _loop0_87_rule(p)) // lambda_param_maybe_default* + && + (c = lambda_kwds_rule(p), 1) // lambda_kwds? + ) + { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); + _res = _PyPegen_star_etc ( p , a , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); + } + { // '*' ',' lambda_param_maybe_default+ lambda_kwds? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); + Token * _literal; + Token * _literal_1; + asdl_seq * b; + void *c; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + && + (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ + && + (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
+ ) + { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); + _res = _PyPegen_star_etc ( p , NULL , b , c ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); + } + { // lambda_kwds + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); + arg_ty a; + if ( + (a = lambda_kwds_rule(p)) // lambda_kwds + ) + { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); + _res = _PyPegen_star_etc ( p , NULL , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_kwds")); + } + { // invalid_lambda_star_etc + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); + void *invalid_lambda_star_etc_var; + if ( + (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc + ) + { + D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); + _res = invalid_lambda_star_etc_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_lambda_star_etc")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_kwds: '**' lambda_param_no_default +static arg_ty +lambda_kwds_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + { // '**' lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); + Token * _literal; + arg_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (a = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + D(fprintf(stderr, "%*c+ lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_kwds[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' lambda_param_no_default")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_param_no_default: lambda_param ',' | lambda_param &':' +static arg_ty +lambda_param_no_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + { // lambda_param ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); + Token * _literal; + arg_ty a; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param ','")); + } + { // lambda_param &':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); + arg_ty a; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param &':'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_param_with_default: lambda_param default ',' | lambda_param default &':' +static NameDefaultPair* +lambda_param_with_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + NameDefaultPair* _res = NULL; + int _mark = p->mark; + { // lambda_param default ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); + Token * _literal; + arg_ty a; + expr_ty c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p)) // default + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param default ','")); + } + { // lambda_param default &':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); + arg_ty a; + expr_ty c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p)) // default + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default &':'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' +static NameDefaultPair* +lambda_param_maybe_default_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + NameDefaultPair* _res = NULL; + int _mark = p->mark; + { // lambda_param default? ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? ','")); + Token * _literal; + arg_ty a; + void *c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p), 1) // default? + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? ','")); + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? ','")); + } + { // lambda_param default? &':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); + arg_ty a; + void *c; + if ( + (a = lambda_param_rule(p)) // lambda_param + && + (c = default_rule(p), 1) // default? + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' + ) + { + D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); + _res = _PyPegen_name_default_pair ( p , a , c , NULL ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? 
&':'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lambda_param: NAME +static arg_ty +lambda_param_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + arg_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lambda_param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty a; + if ( + (a = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ lambda_param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lambda_param[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// disjunction: conjunction (('or' conjunction))+ | conjunction +static expr_ty +disjunction_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, disjunction_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // conjunction (('or' conjunction))+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); + expr_ty a; + asdl_seq * b; + if ( + (a = conjunction_rule(p)) // conjunction + && + (b = _loop1_89_rule(p)) // (('or' conjunction))+ + ) + { + D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "conjunction (('or' conjunction))+")); + } + { // conjunction + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction")); + expr_ty conjunction_var; + if ( + (conjunction_var = conjunction_rule(p)) // conjunction + ) + { + D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction")); + _res = conjunction_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "conjunction")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, disjunction_type, _res); + D(p->level--); + return _res; +} + +// conjunction: inversion (('and' inversion))+ | inversion +static expr_ty +conjunction_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, conjunction_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // inversion (('and' inversion))+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); + expr_ty a; + asdl_seq * b; + if ( + (a = inversion_rule(p)) // inversion + && + (b = _loop1_90_rule(p)) // (('and' inversion))+ + ) + { + D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "inversion (('and' inversion))+")); + } + { // inversion + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion")); + expr_ty inversion_var; + if ( + (inversion_var = inversion_rule(p)) // inversion + ) + { + D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion")); + _res = inversion_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "inversion")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, conjunction_type, _res); + D(p->level--); + return _res; +} + +// inversion: 'not' inversion | comparison +static expr_ty +inversion_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, inversion_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'not' inversion + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' inversion")); + Token * _keyword; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' + && + (a = inversion_rule(p)) // inversion + ) + { + D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' inversion")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Not , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'not' inversion")); + } + { // comparison + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "comparison")); + expr_ty comparison_var; + if ( + (comparison_var = comparison_rule(p)) // comparison + ) + { + D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "comparison")); + _res = comparison_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "comparison")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, inversion_type, _res); + D(p->level--); + return _res; +} + +// comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or +static expr_ty +comparison_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // bitwise_or compare_op_bitwise_or_pair+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); + expr_ty a; + asdl_seq * b; + if ( + (a = bitwise_or_rule(p)) // bitwise_or + && + (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ + ) + { + D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); + } + { // bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or")); + expr_ty bitwise_or_var; + if ( + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or")); + _res = bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// compare_op_bitwise_or_pair: +// | eq_bitwise_or +// | noteq_bitwise_or +// | lte_bitwise_or +// | lt_bitwise_or +// | gte_bitwise_or +// | gt_bitwise_or +// | notin_bitwise_or +// | in_bitwise_or +// | isnot_bitwise_or +// | is_bitwise_or +static CmpopExprPair* +compare_op_bitwise_or_pair_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // eq_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); + CmpopExprPair* eq_bitwise_or_var; + if ( + (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); + _res = eq_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "eq_bitwise_or")); + } + { // noteq_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); + CmpopExprPair* noteq_bitwise_or_var; + if ( + (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); + _res = noteq_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "noteq_bitwise_or")); + } + { // lte_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); + CmpopExprPair* lte_bitwise_or_var; + if ( + (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); + _res = lte_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lte_bitwise_or")); + } + { // lt_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); + CmpopExprPair* lt_bitwise_or_var; + if ( + (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); + _res = lt_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lt_bitwise_or")); + } + { // gte_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); + CmpopExprPair* gte_bitwise_or_var; + if ( + (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); + _res = gte_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gte_bitwise_or")); + } + { // gt_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); + CmpopExprPair* gt_bitwise_or_var; + if ( + (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); + _res = gt_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gt_bitwise_or")); + } + { // notin_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); + CmpopExprPair* notin_bitwise_or_var; + if ( + (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); + _res = notin_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "notin_bitwise_or")); + } + { // in_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); + CmpopExprPair* in_bitwise_or_var; + if ( + (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); + _res = in_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "in_bitwise_or")); + } + { // isnot_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); + CmpopExprPair* isnot_bitwise_or_var; + if ( + (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); + _res = isnot_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "isnot_bitwise_or")); + } + { // is_bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); + CmpopExprPair* is_bitwise_or_var; + if ( + (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or + ) + { + D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); + _res = is_bitwise_or_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "is_bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// eq_bitwise_or: '==' bitwise_or +static CmpopExprPair* +eq_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // '==' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> eq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 27)) // token='==' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ eq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s eq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'==' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// noteq_bitwise_or: ('!=') bitwise_or +static CmpopExprPair* +noteq_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // ('!=') bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); + void *_tmp_92_var; + expr_ty a; + if ( + (_tmp_92_var = _tmp_92_rule(p)) // '!=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ noteq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s noteq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('!=') bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lte_bitwise_or: '<=' bitwise_or +static CmpopExprPair* +lte_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // '<=' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 29)) // token='<=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ lte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<=' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// lt_bitwise_or: '<' bitwise_or +static CmpopExprPair* +lt_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // '<' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> lt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 20)) // token='<' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ lt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s lt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// gte_bitwise_or: '>=' bitwise_or +static CmpopExprPair* +gte_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // '>=' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> gte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 30)) // token='>=' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ gte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s gte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>=' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// gt_bitwise_or: '>' bitwise_or +static CmpopExprPair* +gt_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // '>' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> gt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 21)) // token='>' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ gt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s gt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'>' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// notin_bitwise_or: 'not' 'in' bitwise_or +static CmpopExprPair* +notin_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // 'not' 'in' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> notin_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 525)) // token='not' + && + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ notin_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s notin_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'not' 'in' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// in_bitwise_or: 'in' bitwise_or +static CmpopExprPair* +in_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // 'in' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> in_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); + Token * _keyword; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 518)) // token='in' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ in_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , In , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s in_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'in' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// isnot_bitwise_or: 'is' 'not' bitwise_or +static CmpopExprPair* +isnot_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // 'is' 'not' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> isnot_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' + && + (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ isnot_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s isnot_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'is' 'not' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// is_bitwise_or: 'is' bitwise_or +static CmpopExprPair* +is_bitwise_or_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + CmpopExprPair* _res = NULL; + int _mark = p->mark; + { // 'is' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> is_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); + Token * _keyword; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 526)) // token='is' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ is_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); + _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s is_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'is' bitwise_or")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// bitwise_or: bitwise_or '|' bitwise_xor | bitwise_xor +static expr_ty bitwise_or_raw(Parser *); +static expr_ty +bitwise_or_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); + if (tmpvar_1) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = bitwise_or_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +bitwise_or_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // bitwise_or '|' bitwise_xor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = bitwise_or_rule(p)) // bitwise_or + && + (_literal = _PyPegen_expect_token(p, 18)) // token='|' + && + (b = bitwise_xor_rule(p)) // bitwise_xor + ) + { + D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitOr , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or '|' bitwise_xor")); + } + { // bitwise_xor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); + expr_ty bitwise_xor_var; + if ( + (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor + ) + { + D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); + _res = bitwise_xor_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_xor")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// bitwise_xor: bitwise_xor '^' bitwise_and | bitwise_and +static expr_ty bitwise_xor_raw(Parser *); +static expr_ty +bitwise_xor_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); + if (tmpvar_2) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = bitwise_xor_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +bitwise_xor_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // bitwise_xor '^' bitwise_and + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' bitwise_and")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = bitwise_xor_rule(p)) // bitwise_xor + && + (_literal = _PyPegen_expect_token(p, 32)) // token='^' + && + (b = bitwise_and_rule(p)) // bitwise_and + ) + { + D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' bitwise_and")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitXor , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_xor '^' bitwise_and")); + } + { // bitwise_and + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and")); + expr_ty bitwise_and_var; + if ( + (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and + ) + { + D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and")); + _res = bitwise_and_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_and")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// bitwise_and: bitwise_and '&' shift_expr | shift_expr +static expr_ty bitwise_and_raw(Parser *); +static expr_ty +bitwise_and_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, bitwise_and_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); + if (tmpvar_3) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = bitwise_and_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +bitwise_and_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // bitwise_and '&' shift_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = bitwise_and_rule(p)) // bitwise_and + && + (_literal = _PyPegen_expect_token(p, 19)) // token='&' + && + (b = shift_expr_rule(p)) // shift_expr + ) + { + D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_and '&' shift_expr")); + } + { // shift_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr")); + expr_ty shift_expr_var; + if ( + (shift_expr_var = shift_expr_rule(p)) // shift_expr + ) + { + D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr")); + _res = shift_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "shift_expr")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// shift_expr: shift_expr '<<' sum | shift_expr '>>' sum | sum +static expr_ty shift_expr_raw(Parser *); +static expr_ty +shift_expr_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); + if (tmpvar_4) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = shift_expr_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +shift_expr_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // shift_expr '<<' sum + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = shift_expr_rule(p)) // shift_expr + && + (_literal = _PyPegen_expect_token(p, 33)) // token='<<' + && + (b = sum_rule(p)) // sum + ) + { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , LShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "shift_expr '<<' sum")); + } + { // shift_expr '>>' sum + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = shift_expr_rule(p)) // shift_expr + && + (_literal = _PyPegen_expect_token(p, 34)) // token='>>' + && + (b = sum_rule(p)) // sum + ) + { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , RShift , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "shift_expr '>>' sum")); + } + { // sum + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum")); + expr_ty sum_var; + if ( + (sum_var = sum_rule(p)) // sum + ) + { + D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum")); + _res = sum_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// sum: sum '+' term | sum '-' term | term +static expr_ty sum_raw(Parser *); +static expr_ty +sum_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, sum_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); + if (tmpvar_5) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = sum_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +sum_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // sum '+' term + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '+' term")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = sum_rule(p)) // sum + && + (_literal = _PyPegen_expect_token(p, 14)) // token='+' + && + (b = term_rule(p)) // term + ) + { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '+' term")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } 
+ int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Add , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '+' term")); + } + { // sum '-' term + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '-' term")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = sum_rule(p)) // sum + && + (_literal = _PyPegen_expect_token(p, 15)) // token='-' + && + (b = term_rule(p)) // term + ) + { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '-' term")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Sub , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '-' term")); + } + { // term + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term")); + expr_ty term_var; + if ( + (term_var = term_rule(p)) // term + ) + { + D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term")); + _res = term_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// Left-recursive +// term: +// | term '*' factor +// | term '/' factor +// | term '//' factor +// | term '%' factor +// | term '@' factor +// | factor +static expr_ty term_raw(Parser *); +static expr_ty +term_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, term_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); + if (tmpvar_6) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = term_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +term_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // term '*' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '*' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = term_rule(p)) // term + && + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '*' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mult , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '*' factor")); + } + { // term '/' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '/' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = term_rule(p)) // term + && + (_literal = _PyPegen_expect_token(p, 17)) // token='/' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '/' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Div , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '/' factor")); + } + { // term '//' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '//' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = term_rule(p)) // term + && + (_literal = _PyPegen_expect_token(p, 47)) // token='//' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '//' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '//' factor")); + } + { // term '%' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '%' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = term_rule(p)) // term + && + (_literal = _PyPegen_expect_token(p, 24)) // token='%' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '%' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Mod , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '%' factor")); + } + { // term '@' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '@' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = term_rule(p)) // term + && + (_literal = _PyPegen_expect_token(p, 49)) // token='@' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '@' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '@' factor")); + } + { // factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "factor")); + expr_ty factor_var; + if ( + (factor_var = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "factor")); + _res = factor_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "factor")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// factor: '+' factor | '-' factor | '~' factor | power +static expr_ty +factor_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, factor_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '+' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+' factor")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 14)) // token='+' + && + (a = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( UAdd , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+' factor")); + } + { // '-' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-' factor")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 15)) // token='-' + && + (a = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( USub , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'-' factor")); + } + { // '~' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~' factor")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 31)) // token='~' + && + (a = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_UnaryOp ( Invert , a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'~' factor")); + } + { // power + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "power")); + expr_ty power_var; + if ( + (power_var = power_rule(p)) // power + ) + { + D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "power")); + _res = power_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "power")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, factor_type, _res); + D(p->level--); + return _res; +} + +// power: await_primary '**' factor | await_primary +static expr_ty +power_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // await_primary '**' factor + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = await_primary_rule(p)) // await_primary + && + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (b = factor_rule(p)) // factor + ) + { + D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_BinOp ( a , Pow , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "await_primary '**' factor")); + } + { // await_primary + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary")); + expr_ty await_primary_var; + if ( + (await_primary_var = await_primary_rule(p)) // await_primary + ) + { + D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary")); + _res = await_primary_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "await_primary")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// await_primary: AWAIT primary | primary +static expr_ty +await_primary_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, await_primary_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // AWAIT primary + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); + expr_ty a; + Token * await_var; + if ( + (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' + && + (a = primary_rule(p)) // primary + ) + { + D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "AWAIT primary")); + } + { // primary + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary")); + expr_ty primary_var; + if ( + (primary_var = primary_rule(p)) // primary + ) + { + D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary")); + _res = primary_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, await_primary_type, _res); + D(p->level--); + return _res; +} + +// Left-recursive +// primary: +// | primary '.' NAME +// | primary genexp +// | primary '(' arguments? 
')' +// | primary '[' slices ']' +// | atom +static expr_ty primary_raw(Parser *); +static expr_ty +primary_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, primary_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, _res); + if (tmpvar_7) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +primary_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // primary '.' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '.' NAME")); + } + { // primary genexp + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary genexp")); + expr_ty a; + expr_ty b; + if ( + (a = primary_rule(p)) // primary + && + (b = genexp_rule(p)) // genexp + ) + { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary genexp")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary genexp")); + } + { // primary '(' arguments? 
')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + void *b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (b = arguments_rule(p), 1) // arguments? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '(' arguments? ')'")); + } + { // primary '[' slices ']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = primary_rule(p)) // primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '[' slices ']'")); + } + { // atom + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom")); + expr_ty atom_var; + if ( + (atom_var = atom_rule(p)) // atom + ) + { + D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom")); + _res = atom_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "atom")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// slices: slice !',' | ','.slice+ ','? 
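/* A reading note for the generated rule functions around here: every
   alternative runs in its own block, the token position is saved in _mark on
   entry, and a failed alternative rewinds with p->mark = _mark before the
   next one is tried (PEG ordered choice with backtracking).  Lookaheads do
   not consume input: the `!','` in the first slices alternative below is
   emitted as _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12)
   ("succeed only if the next token is not ','"), and the &'(' / &'[' / &'{'
   guards in atom use the same helper with a leading 1 for positive
   lookahead.  Left-recursive rules such as bitwise_or, sum, term and primary
   additionally get a *_rule wrapper that repeatedly re-runs the *_raw body
   from the same mark, memoizing the best result so far and stopping once a
   pass no longer advances p->mark.  Stripped of the D()/fprintf debug
   plumbing, one alternative reduces to a sketch like:

       int _mark = p->mark;                          // remember start
       if ((a = slice_rule(p))                       // parse one slice
           && _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12))
       {                                             // ... not followed by ','
           _res = a;                                 // keep the result
           goto done;
       }
       p->mark = _mark;                              // rewind, try next */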
+static expr_ty +slices_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // slice !',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice !','")); + expr_ty a; + if ( + (a = slice_rule(p)) // slice + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' + ) + { + D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice !','")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice !','")); + } + { // ','.slice+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_93_rule(p)) // ','.slice+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.slice+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// slice: expression? ':' expression? [':' expression?] | expression +static expr_ty +slice_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // expression? ':' expression? [':' expression?] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]")); + Token * _literal; + void *a; + void *b; + void *c; + if ( + (a = expression_rule(p), 1) // expression? + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = expression_rule(p), 1) // expression? + && + (c = _tmp_95_rule(p), 1) // [':' expression?] 
+ ) + { + D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Slice ( a , b , c , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression? ':' expression? [':' expression?]")); + } + { // expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); + expr_ty a; + if ( + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// atom: +// | NAME +// | 'True' +// | 'False' +// | 'None' +// | &STRING strings +// | NUMBER +// | &'(' (tuple | group | genexp) +// | &'[' (list | listcomp) +// | &'{' (dict | set | dictcomp | setcomp) +// | '...' +static expr_ty +atom_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty name_var; + if ( + (name_var = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + _res = name_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); + } + { // 'True' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 527)) // token='True' + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_True , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); + } + { // 'False' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 528)) // token='False' + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_False , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); + } + { // 'None' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 529)) // token='None' + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_None , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); + } + { // &STRING strings + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings")); + expr_ty strings_var; + if ( + _PyPegen_lookahead(1, _PyPegen_string_token, p) + && + (strings_var = strings_rule(p)) // strings + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings")); + _res = strings_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings")); + } + { // NUMBER + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NUMBER")); + expr_ty number_var; + if ( + (number_var = _PyPegen_number_token(p)) // NUMBER + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NUMBER")); + _res = number_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NUMBER")); + } + { // &'(' (tuple | group | genexp) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); + void *_tmp_96_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' + && + (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); + _res = _tmp_96_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'(' (tuple | group | genexp)")); + } + { // &'[' (list | listcomp) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); + void *_tmp_97_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' + && + (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); + _res = _tmp_97_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'[' (list | listcomp)")); + } + { // &'{' (dict | set | dictcomp | setcomp) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); + void *_tmp_98_var; + if ( + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' + && + (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); + _res = _tmp_98_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); + } + { // '...' 
+ if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 52)) // token='...' + ) + { + D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// strings: STRING+ +static expr_ty +strings_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, strings_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + { // STRING+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+")); + asdl_seq * a; + if ( + (a = _loop1_99_rule(p)) // STRING+ + ) + { + D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+")); + _res = _PyPegen_concatenate_strings ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, strings_type, _res); + D(p->level--); + return _res; +} + +// list: '[' star_named_expressions? ']' +static expr_ty +list_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '[' star_named_expressions? ']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? ']'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (a = star_named_expressions_rule(p), 1) // star_named_expressions? + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? 
']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s list[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_named_expressions? ']'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension +static expr_ty +listcomp_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '[' named_expression for_if_clauses ']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + asdl_seq* b; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (a = named_expression_rule(p)) // named_expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_ListComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + } + { // invalid_comprehension + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + void *invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + _res = invalid_comprehension_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// tuple: '(' [star_named_expression ',' star_named_expressions?] 
')' +static expr_ty +tuple_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '(' [star_named_expression ',' star_named_expressions?] ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> tuple[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ tuple[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s tuple[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// group: '(' (yield_expr | named_expression) ')' +static expr_ty +group_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // '(' (yield_expr | named_expression) ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> group[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = _tmp_101_rule(p)) // yield_expr | named_expression + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ group[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s group[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// genexp: '(' expression for_if_clauses ')' | invalid_comprehension +static expr_ty +genexp_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '(' expression for_if_clauses ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + asdl_seq* b; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = expression_rule(p)) // expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_GeneratorExp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' expression for_if_clauses ')'")); + } + { // invalid_comprehension + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + void *invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + _res = invalid_comprehension_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_comprehension")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// set: '{' expressions_list '}' +static expr_ty +set_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '{' expressions_list '}' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> set[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); + Token * _literal; + Token * _literal_1; + asdl_seq* a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = expressions_list_rule(p)) // expressions_list + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ set[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Set ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s set[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' expressions_list '}'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// setcomp: '{' expression for_if_clauses '}' | invalid_comprehension +static expr_ty +setcomp_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '{' expression for_if_clauses '}' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + asdl_seq* b; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = expression_rule(p)) // expression + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_SetComp ( a , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' expression for_if_clauses '}'")); + } + { // invalid_comprehension + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + void *invalid_comprehension_var; + if ( + (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension + ) + { + D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); + _res = invalid_comprehension_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// dict: '{' double_starred_kvpairs? '}' +static expr_ty +dict_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '{' double_starred_kvpairs? '}' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dict[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? 
'}'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = double_starred_kvpairs_rule(p), 1) // double_starred_kvpairs? + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ dict[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? '}'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dict[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' double_starred_kvpairs? '}'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension +static expr_ty +dictcomp_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '{' kvpair for_if_clauses '}' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); + Token * _literal; + Token * _literal_1; + KeyValuePair* a; + asdl_seq* b; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = kvpair_rule(p)) // kvpair + && + (b = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' kvpair for_if_clauses '}'")); + } + { // invalid_dict_comprehension + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); + void *invalid_dict_comprehension_var; + if ( + (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension + ) + { + D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); + _res = invalid_dict_comprehension_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_dict_comprehension")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// double_starred_kvpairs: ','.double_starred_kvpair+ ','? +static asdl_seq* +double_starred_kvpairs_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.double_starred_kvpair+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_102_rule(p)) // ','.double_starred_kvpair+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpairs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.double_starred_kvpair+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// double_starred_kvpair: '**' bitwise_or | kvpair +static KeyValuePair* +double_starred_kvpair_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + KeyValuePair* _res = NULL; + int _mark = p->mark; + { // '**' bitwise_or + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (a = bitwise_or_rule(p)) // bitwise_or + ) + { + D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); + _res = _PyPegen_key_value_pair ( p , NULL , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' bitwise_or")); + } + { // kvpair + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kvpair")); + KeyValuePair* kvpair_var; + if ( + (kvpair_var = kvpair_rule(p)) // kvpair + ) + { + D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kvpair")); + _res = kvpair_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kvpair")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// kvpair: expression ':' expression +static KeyValuePair* +kvpair_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + KeyValuePair* _res = NULL; + int _mark = p->mark; + { // expression ':' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); + _res = _PyPegen_key_value_pair ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kvpair[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// for_if_clauses: for_if_clause+ +static asdl_seq* +for_if_clauses_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // for_if_clause+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); + asdl_seq * _loop1_104_var; + if ( + (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ + ) + { + D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); + _res = _loop1_104_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clauses[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// for_if_clause: +// | ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* +// | 'for' star_targets 'in' disjunction (('if' disjunction))* +static comprehension_ty +for_if_clause_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + comprehension_ty _res = NULL; + int _mark = p->mark; + { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + Token * async_var; + expr_ty b; + asdl_seq * c; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + && + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + && + (a = star_targets_rule(p)) // star_targets + && + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + && + (b = disjunction_rule(p)) // disjunction + && + (c = _loop0_105_rule(p)) // (('if' disjunction))* + ) + { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + } + { // 'for' star_targets 'in' disjunction (('if' disjunction))* + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + expr_ty b; + asdl_seq * c; + if ( + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + && + (a = star_targets_rule(p)) // star_targets + && + (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' + && + (b = disjunction_rule(p)) // disjunction + && + (c = _loop0_106_rule(p)) // (('if' disjunction))* + ) + { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// yield_expr: 'yield' 'from' expression | 'yield' star_expressions? 
+static expr_ty +yield_expr_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // 'yield' 'from' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); + Token * _keyword; + Token * _keyword_1; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' + && + (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_YieldFrom ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'yield' 'from' expression")); + } + { // 'yield' star_expressions? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); + Token * _keyword; + void *a; + if ( + (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' + && + (a = star_expressions_rule(p), 1) // star_expressions? + ) + { + D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Yield ( a , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'yield' star_expressions?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// arguments: args ','? &')' | incorrect_arguments +static expr_ty +arguments_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, arguments_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + { // args ','? &')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','? 
&')'")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + if ( + (a = args_rule(p)) // args + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + && + _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' + ) + { + D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','? &')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','? &')'")); + } + { // incorrect_arguments + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); + void *incorrect_arguments_var; + if ( + (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments + ) + { + D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); + _res = incorrect_arguments_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "incorrect_arguments")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, arguments_type, _res); + D(p->level--); + return _res; +} + +// args: starred_expression [',' args] | kwargs | named_expression [',' args] +static expr_ty +args_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // starred_expression [',' args] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); + expr_ty a; + void *b; + if ( + (a = starred_expression_rule(p)) // starred_expression + && + (b = _tmp_107_rule(p), 1) // [',' args] + ) + { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression [',' args]")); + } + { // kwargs + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs")); + asdl_seq* a; + if ( + (a = kwargs_rule(p)) // kwargs + ) + { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwargs")); + } + { // named_expression [',' args] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); + expr_ty a; + void *b; + if ( + (a = named_expression_rule(p)) // named_expression + && + (b = _tmp_108_rule(p), 1) // [',' args] + ) + { + D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "named_expression [',' args]")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// kwargs: +// | ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ +// | ','.kwarg_or_starred+ +// | ','.kwarg_or_double_starred+ +static asdl_seq* +kwargs_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); + Token * _literal; + asdl_seq * a; + asdl_seq * b; + if ( + (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ + ) + { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); + _res = _PyPegen_join_sequences ( p , a , b ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); + } + { // ','.kwarg_or_starred+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); + asdl_seq * _gather_113_var; + if ( + (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ + ) + { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); + _res = _gather_113_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.kwarg_or_starred+")); + } + { // ','.kwarg_or_double_starred+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); + asdl_seq * _gather_115_var; + if ( + (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ + ) + { + D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); + _res = _gather_115_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.kwarg_or_double_starred+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// starred_expression: '*' expression +static expr_ty +starred_expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '*' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( a , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s starred_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg +static KeywordOrStarred* +kwarg_or_starred_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + KeywordOrStarred* _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME '=' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression")); + } + { // starred_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); + expr_ty a; + if ( + (a = starred_expression_rule(p)) // starred_expression + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); + _res = _PyPegen_keyword_or_starred ( p , a , 0 ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); + } + { // invalid_kwarg + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); + _res = invalid_kwarg_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_kwarg")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg +static KeywordOrStarred* +kwarg_or_double_starred_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + KeywordOrStarred* _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME '=' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (b = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression")); + } + { // '**' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); + Token * _literal; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + && + (a = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); + } + { // invalid_kwarg + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); + void *invalid_kwarg_var; + if ( + (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg + ) + { + D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); + _res = invalid_kwarg_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_kwarg")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_targets: star_target !',' | star_target ((',' star_target))* ','? 
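+// Editorial note, not part of the generated file: star_target (defined a
+// little further below) is one of the memoized rules; it checks
+// _PyPegen_is_memoized() on entry and stores its result with
+// _PyPegen_insert_memo() before returning, so backtracking over the same
+// token position does not re-parse the target.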
+static expr_ty +star_targets_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // star_target !',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target !','")); + expr_ty a; + if ( + (a = star_target_rule(p)) // star_target + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' + ) + { + D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target !','")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target !','")); + } + { // star_target ((',' star_target))* ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + asdl_seq * b; + if ( + (a = star_target_rule(p)) // star_target + && + (b = _loop0_117_rule(p)) // ((',' star_target))* + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target ((',' star_target))* ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_targets_seq: ','.star_target+ ','? +static asdl_seq* +star_targets_seq_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.star_target+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_targets_seq[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_118_rule(p)) // ','.star_target+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
+ ) + { + D(fprintf(stderr, "%*c+ star_targets_seq[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_targets_seq[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_target+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// star_target: +// | '*' (!'*' star_target) +// | t_primary '.' NAME !t_lookahead +// | t_primary '[' slices ']' !t_lookahead +// | star_atom +static expr_ty +star_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, star_target_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // '*' (!'*' star_target) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); + Token * _literal; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (a = _tmp_120_rule(p)) // !'*' star_target + ) + { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (!'*' star_target)")); + } + { // t_primary '.' NAME !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + } + { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + } + { // star_atom + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_atom")); + expr_ty star_atom_var; + if ( + (star_atom_var = star_atom_rule(p)) // star_atom + ) + { + D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_atom")); + _res = star_atom_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_atom")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, star_target_type, _res); + D(p->level--); + return _res; +} + +// star_atom: +// | NAME +// | '(' star_target ')' +// | '(' star_targets_seq? ')' +// | '[' star_targets_seq? 
']' +static expr_ty +star_atom_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty a; + if ( + (a = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); + } + { // '(' star_target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = star_target_rule(p)) // star_target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_target ')'")); + } + { // '(' star_targets_seq? ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = star_targets_seq_rule(p), 1) // star_targets_seq? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_targets_seq? ')'")); + } + { // '[' star_targets_seq? ']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? 
']'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (a = star_targets_seq_rule(p), 1) // star_targets_seq? + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? ']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_targets_seq? ']'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// single_target: single_subscript_attribute_target | NAME | '(' single_target ')' +static expr_ty +single_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // single_subscript_attribute_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + expr_ty single_subscript_attribute_target_var; + if ( + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target + ) + { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + _res = single_subscript_attribute_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); + } + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty a; + if ( + (a = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); + } + { // '(' single_target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = single_target_rule(p)) // single_target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// single_subscript_attribute_target: +// | t_primary '.' NAME !t_lookahead +// | t_primary '[' slices ']' !t_lookahead +static expr_ty +single_subscript_attribute_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // t_primary '.' NAME !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' 
NAME !t_lookahead")); + } + { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// del_targets: ','.del_target+ ','? +static asdl_seq* +del_targets_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.del_target+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_121_rule(p)) // ','.del_target+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ del_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.del_target+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// del_target: +// | t_primary '.' NAME &del_target_end +// | t_primary '[' slices ']' &del_target_end +// | del_t_atom +static expr_ty +del_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, del_target_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // t_primary '.' 
NAME &del_target_end + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME &del_target_end")); + } + { // t_primary '[' slices ']' &del_target_end + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + } + { // del_t_atom + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_t_atom")); + expr_ty del_t_atom_var; + if ( + (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom + ) + { + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_t_atom")); + _res = del_t_atom_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_t_atom")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, del_target_type, _res); + D(p->level--); + return _res; +} + +// del_t_atom: +// | NAME &del_target_end +// | '(' del_target ')' +// | '(' del_targets? ')' +// | '[' del_targets? 
']' +// | invalid_del_target +static expr_ty +del_t_atom_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME &del_target_end + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); + expr_ty a; + if ( + (a = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME &del_target_end")); + } + { // '(' del_target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = del_target_rule(p)) // del_target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); + _res = _PyPegen_set_expr_context ( p , a , Del ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_target ')'")); + } + { // '(' del_targets? ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = del_targets_rule(p), 1) // del_targets? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_targets? ')'")); + } + { // '[' del_targets? 
']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); + Token * _literal; + Token * _literal_1; + void *a; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (a = del_targets_rule(p), 1) // del_targets? + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( a , Del , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' del_targets? ']'")); + } + { // invalid_del_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); + void *invalid_del_target_var; + if ( + (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target + ) + { + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); + _res = invalid_del_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_del_target")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// del_target_end: ')' | ']' | ',' | ';' | NEWLINE +static void * +del_target_end_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + { // ']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "']'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "']'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "']'")); + } + { // ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); + } + { // ';' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + ) + { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); + } + { // NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + _res = newline_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// targets: ','.target+ ','? +static asdl_seq* +targets_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq* _res = NULL; + int _mark = p->mark; + { // ','.target+ ','? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + asdl_seq * a; + if ( + (a = _gather_123_rule(p)) // ','.target+ + && + (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? + ) + { + D(fprintf(stderr, "%*c+ targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.target+ ','?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// target: +// | t_primary '.' 
NAME !t_lookahead +// | t_primary '[' slices ']' !t_lookahead +// | t_atom +static expr_ty +target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, target_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // t_primary '.' NAME !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); + } + { // t_primary '[' slices ']' !t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + && + _PyPegen_lookahead(0, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); + } + { // t_atom + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_atom")); + expr_ty t_atom_var; + if ( + (t_atom_var = t_atom_rule(p)) // t_atom + ) + { + D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_atom")); + _res = t_atom_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_atom")); + } + _res = NULL; + done: + _PyPegen_insert_memo(p, _mark, target_type, _res); + D(p->level--); + return _res; +} + +// Left-recursive +// t_primary: +// | t_primary '.' NAME &t_lookahead +// | t_primary '[' slices ']' &t_lookahead +// | t_primary genexp &t_lookahead +// | t_primary '(' arguments? ')' &t_lookahead +// | atom &t_lookahead +static expr_ty t_primary_raw(Parser *); +static expr_ty +t_primary_rule(Parser *p) +{ + D(p->level++); + expr_ty _res = NULL; + if (_PyPegen_is_memoized(p, t_primary_type, &_res)) { + D(p->level--); + return _res; + } + int _mark = p->mark; + int _resmark = p->mark; + while (1) { + int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); + if (tmpvar_8) { + D(p->level--); + return _res; + } + p->mark = _mark; + void *_raw = t_primary_raw(p); + if (_raw == NULL || p->mark <= _resmark) + break; + _resmark = p->mark; + _res = _raw; + } + p->mark = _resmark; + D(p->level--); + return _res; +} +static expr_ty +t_primary_raw(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // t_primary '.' NAME &t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); + Token * _literal; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + && + (b = _PyPegen_name_token(p)) // NAME + && + _PyPegen_lookahead(1, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' 
NAME &t_lookahead")); + } + { // t_primary '[' slices ']' &t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); + Token * _literal; + Token * _literal_1; + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = slices_rule(p)) // slices + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + && + _PyPegen_lookahead(1, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Subscript ( a , b , Load , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); + } + { // t_primary genexp &t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); + expr_ty a; + expr_ty b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (b = genexp_rule(p)) // genexp + && + _PyPegen_lookahead(1, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary genexp &t_lookahead")); + } + { // t_primary '(' arguments? ')' &t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); + Token * _literal; + Token * _literal_1; + expr_ty a; + void *b; + if ( + (a = t_primary_rule(p)) // t_primary + && + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (b = arguments_rule(p), 1) // arguments? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + && + _PyPegen_lookahead(1, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? 
')' &t_lookahead")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); + } + { // atom &t_lookahead + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); + expr_ty a; + if ( + (a = atom_rule(p)) // atom + && + _PyPegen_lookahead(1, t_lookahead_rule, p) + ) + { + D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "atom &t_lookahead")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// t_lookahead: '(' | '[' | '.' +static void * +t_lookahead_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '(' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + ) + { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); + } + { // '[' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + ) + { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); + } + { // '.' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + ) + { + D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'.'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// t_atom: NAME | '(' target ')' | '(' targets? ')' | '[' targets? ']' +static expr_ty +t_atom_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + int _start_lineno = p->tokens[_mark]->lineno; + UNUSED(_start_lineno); // Only used by EXTRA macro + int _start_col_offset = p->tokens[_mark]->col_offset; + UNUSED(_start_col_offset); // Only used by EXTRA macro + { // NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); + expr_ty a; + if ( + (a = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); + } + { // '(' target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = target_rule(p)) // target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); + _res = _PyPegen_set_expr_context ( p , a , Store ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' target ')'")); + } + { // '(' targets? ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); + Token * _literal; + Token * _literal_1; + void *b; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (b = targets_rule(p), 1) // targets? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_Tuple ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' targets? ')'")); + } + { // '[' targets? 
']' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); + Token * _literal; + Token * _literal_1; + void *b; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + && + (b = targets_rule(p), 1) // targets? + && + (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' + ) + { + D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); + Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); + if (_token == NULL) { + D(p->level--); + return NULL; + } + int _end_lineno = _token->end_lineno; + UNUSED(_end_lineno); // Only used by EXTRA macro + int _end_col_offset = _token->end_col_offset; + UNUSED(_end_col_offset); // Only used by EXTRA macro + _res = _Py_List ( b , Store , EXTRA ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' targets? ']'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// incorrect_arguments: +// | args ',' '*' +// | expression for_if_clauses ',' [args | expression for_if_clauses] +// | args for_if_clauses +// | args ',' expression for_if_clauses +// | args ',' args +static void * +incorrect_arguments_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // args ',' '*' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); + Token * _literal; + Token * _literal_1; + expr_ty args_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' + ) + { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); + _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' '*'")); + } + { // expression for_if_clauses ',' [args | expression for_if_clauses] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + asdl_seq* for_if_clauses_var; + if ( + (a = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] + ) + { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); + } + { // args for_if_clauses + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); + expr_ty a; + asdl_seq* for_if_clauses_var; + if ( + (a = args_rule(p)) // args + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); + _res = _PyPegen_nonparen_genexp_in_call ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args for_if_clauses")); + } + { // args ',' expression for_if_clauses + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); + Token * _literal; + expr_ty a; + expr_ty args_var; + asdl_seq* for_if_clauses_var; + if ( + (args_var = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (a = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' expression for_if_clauses")); + } + { // args ',' args + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' args")); + Token * _literal; + expr_ty a; + expr_ty args_var; + if ( + (a = args_rule(p)) // args + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (args_var = args_rule(p)) // args + ) + { + D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' args")); + _res = _PyPegen_arguments_parsing_error ( p , a ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ',' args")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_kwarg: expression '=' +static void * +invalid_kwarg_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // expression '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_kwarg[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression '='")); + Token * _literal; + expr_ty a; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ invalid_kwarg[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression '='")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression '='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_named_expression: expression ':=' expression +static void * +invalid_named_expression_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // expression ':=' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); + Token * _literal; + expr_ty a; + expr_ty expression_var; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 53)) // token=':=' + && + (expression_var = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_named_expression[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ':=' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_assignment: +// | list ':' +// | tuple ':' +// | star_named_expression ',' star_named_expressions* ':' +// | expression ':' expression ['=' annotated_rhs] +// | ((star_targets '='))* star_expressions '=' +// | ((star_targets '='))* yield_expr '=' +// | star_expressions augassign (yield_expr | star_expressions) +static void * +invalid_assignment_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // list ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':'")); + Token * _literal; + expr_ty a; + if ( + (a = list_rule(p)) // list + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':'")); + } + { // tuple ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':'")); + Token * _literal; + expr_ty a; + if ( + (a = tuple_rule(p)) // tuple + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple ':'")); + } + { // star_named_expression ',' star_named_expressions* ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + Token * _literal; + Token * _literal_1; + asdl_seq * _loop0_126_var; + expr_ty a; + if ( + (a = star_named_expression_rule(p)) // star_named_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* + && + (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + } + { // expression ':' expression ['=' annotated_rhs] + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + Token * _literal; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + expr_ty expression_var; + if ( + (a = expression_rule(p)) // expression + && + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression + && + (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + } + { // ((star_targets '='))* star_expressions '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); + Token * _literal; + asdl_seq * _loop0_128_var; + expr_ty a; + if ( + (_loop0_128_var = _loop0_128_rule(p)) // ((star_targets '='))* + && + (a = star_expressions_rule(p)) // star_expressions + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))* star_expressions '='")); + } + { // ((star_targets '='))* yield_expr '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); + Token * _literal; + asdl_seq * _loop0_129_var; + expr_ty a; + if ( + (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* + && + (a = yield_expr_rule(p)) // yield_expr + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "assignment to yield expression not possible" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))* yield_expr '='")); + } + { // star_expressions augassign (yield_expr | star_expressions) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); + void *_tmp_130_var; + expr_ty a; + AugOperator* augassign_var; + if ( + (a = star_expressions_rule(p)) // star_expressions + && + (augassign_var = augassign_rule(p)) // augassign + && + (_tmp_130_var = _tmp_130_rule(p)) // yield_expr | star_expressions + ) + { + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_block: NEWLINE !INDENT +static void * +invalid_block_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // NEWLINE !INDENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT + ) + { + D(fprintf(stderr, "%*c+ invalid_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); + _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_block[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE !INDENT")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses +static void * +invalid_comprehension_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ('[' | '(' | '{') starred_expression for_if_clauses + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); + void *_tmp_131_var; + expr_ty a; + asdl_seq* for_if_clauses_var; + if ( + (_tmp_131_var = _tmp_131_rule(p)) // '[' | '(' | '{' + && + (a = starred_expression_rule(p)) // starred_expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + D(fprintf(stderr, "%*c+ invalid_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_comprehension[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}' +static void * +invalid_dict_comprehension_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '{' '**' bitwise_or for_if_clauses '}' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_dict_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); + Token * _literal; + Token * _literal_1; + Token * a; + expr_ty bitwise_or_var; + asdl_seq* for_if_clauses_var; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + && + (a = _PyPegen_expect_token(p, 35)) // token='**' + && + (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + && + (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' + ) + { + D(fprintf(stderr, "%*c+ invalid_dict_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "dict unpacking cannot be used in dict comprehension" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_dict_comprehension[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_parameters: +// | param_no_default* (slash_with_default | param_with_default+) param_no_default +static void * +invalid_parameters_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // param_no_default* (slash_with_default | param_with_default+) param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); + asdl_seq * _loop0_132_var; + void *_tmp_133_var; + arg_ty param_no_default_var; + if ( + (_loop0_132_var = _loop0_132_rule(p)) // param_no_default* + && + (_tmp_133_var = _tmp_133_rule(p)) // slash_with_default | param_with_default+ + && + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + D(fprintf(stderr, "%*c+ invalid_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_lambda_parameters: +// | lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default +static void * +invalid_lambda_parameters_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + asdl_seq * _loop0_134_var; + void *_tmp_135_var; + arg_ty lambda_param_no_default_var; + if ( + (_loop0_134_var = _loop0_134_rule(p)) // lambda_param_no_default* + && + (_tmp_135_var = _tmp_135_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ + && + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + D(fprintf(stderr, "%*c+ invalid_lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT +static void * +invalid_star_etc_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '*' (')' | ',' (')' | '**')) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); + Token * _literal; + void *_tmp_136_var; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_tmp_136_var = _tmp_136_rule(p)) // ')' | ',' (')' | '**') + ) + { + D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); + } + { // '*' ',' TYPE_COMMENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); + Token * _literal; + Token * _literal_1; + Token * type_comment_var; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' + && + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + ) + { + D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); + _res = RAISE_SYNTAX_ERROR ( "bare * has associated type comment" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' ',' TYPE_COMMENT")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) +static void * +invalid_lambda_star_etc_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '*' (':' | ',' (':' | '**')) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); + Token * _literal; + void *_tmp_137_var; + if ( + (_literal = _PyPegen_expect_token(p, 16)) // token='*' + && + (_tmp_137_var = _tmp_137_rule(p)) // ':' | ',' (':' | '**') + ) + { + D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); + _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT +static void * +invalid_double_type_comments_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_double_type_comments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); + Token * indent_var; + Token * newline_var; + Token * newline_var_1; + Token * type_comment_var; + Token * type_comment_var_1; + if ( + (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' + && + (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' + ) + { + D(fprintf(stderr, "%*c+ invalid_double_type_comments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); + _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_double_type_comments[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_del_target: star_expression &del_target_end +static void * +invalid_del_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_expression &del_target_end + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); + expr_ty a; + if ( + (a = star_expression_rule(p)) // star_expression + && + _PyPegen_lookahead(1, del_target_end_rule, p) + ) + { + D(fprintf(stderr, "%*c+ invalid_del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_del_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression &del_target_end")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_import_from_targets: import_from_as_names ',' +static void * +invalid_import_from_targets_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // import_from_as_names ',' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); + Token * _literal; + asdl_seq* import_from_as_names_var; + if ( + (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + ) + { + D(fprintf(stderr, "%*c+ invalid_import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); + _res = RAISE_SYNTAX_ERROR ( "trailing comma not allowed without surrounding parentheses" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_names ','")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_1: NEWLINE +static asdl_seq * +_loop0_1_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_1[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token * newline_var; + while ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_1[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_2: NEWLINE +static asdl_seq * +_loop0_2_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_2[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token * newline_var; + while ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + _res = newline_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_2[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_4: ',' expression +static asdl_seq * +_loop0_4_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = expression_rule(p)) // expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_4[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_3: expression _loop0_4 +static asdl_seq * +_gather_3_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // expression _loop0_4 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_4_rule(p)) // _loop0_4 + ) + { + D(fprintf(stderr, "%*c+ _gather_3[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_3[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_4")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_6: ',' expression +static asdl_seq * +_loop0_6_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = expression_rule(p)) // expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_5: expression _loop0_6 +static asdl_seq * +_gather_5_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // expression _loop0_6 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_6_rule(p)) // _loop0_6 + ) + { + D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_6")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_8: ',' expression +static asdl_seq * +_loop0_8_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = expression_rule(p)) // expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_8[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_7: expression _loop0_8 +static asdl_seq * +_gather_7_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // expression _loop0_8 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_8_rule(p)) // _loop0_8 + ) + { + D(fprintf(stderr, "%*c+ _gather_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_7[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_8")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_10: ',' expression +static asdl_seq * +_loop0_10_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = expression_rule(p)) // expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_10[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_9: expression _loop0_10 +static asdl_seq * +_gather_9_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // expression _loop0_10 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = expression_rule(p)) // expression + && + (seq = _loop0_10_rule(p)) // _loop0_10 + ) + { + D(fprintf(stderr, "%*c+ _gather_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_9[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_10")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_11: statement +static asdl_seq * +_loop1_11_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // statement + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement")); + asdl_seq* statement_var; + while ( + (statement_var = statement_rule(p)) // statement + ) + { + _res = statement_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_11[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_13: ';' small_stmt +static asdl_seq * +_loop0_13_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ';' small_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' small_stmt")); + Token * _literal; + stmt_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + && + (elem = small_stmt_rule(p)) // small_stmt + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_13[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "';' small_stmt")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_12: small_stmt _loop0_13 +static asdl_seq * +_gather_12_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // small_stmt _loop0_13 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); + stmt_ty elem; + asdl_seq * seq; + if ( + (elem = small_stmt_rule(p)) // small_stmt + && + (seq = _loop0_13_rule(p)) // _loop0_13 + ) + { + D(fprintf(stderr, "%*c+ _gather_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_12[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "small_stmt _loop0_13")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_14: 'import' | 'from' +static void * +_tmp_14_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'import' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 513)) // token='import' + ) + { + D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'")); + } + { // 'from' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' + ) + { + D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_15: 'def' | '@' | ASYNC +static void * +_tmp_15_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'def' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + ) + { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'def'")); + } + { // '@' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 49)) // token='@' + ) + { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); + } + { // ASYNC + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + Token * async_var; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + ) + { + D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + _res = async_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_16: 'class' | '@' +static void * +_tmp_16_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'class' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 523)) // token='class' + ) + { + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'")); + } + { // '@' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 49)) // token='@' + ) + { + D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_17: 'with' | ASYNC +static void * +_tmp_17_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'with' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 519)) // token='with' + ) + { + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'with'")); + } + { // ASYNC + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + Token * async_var; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + ) + { + D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + _res = async_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_18: 'for' | ASYNC +static void * +_tmp_18_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'for' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); + Token * _keyword; + if ( + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + ) + { + D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); + _res = _keyword; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'")); + } + { // ASYNC + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); + Token * async_var; + if ( + (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' + ) + { + D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); + _res = async_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_19: '=' annotated_rhs +static void * +_tmp_19_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '=' annotated_rhs + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + Token * _literal; + expr_ty d; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (d = annotated_rhs_rule(p)) // annotated_rhs + ) + { + D(fprintf(stderr, "%*c+ _tmp_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + _res = d; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_19[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_20: '(' single_target ')' | single_subscript_attribute_target +static void * +_tmp_20_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '(' single_target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty b; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (b = single_target_rule(p)) // single_target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); + _res = b; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'")); + } + { // single_subscript_attribute_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + expr_ty single_subscript_attribute_target_var; + if ( + (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target + ) + { + D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); + _res = single_subscript_attribute_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_21: '=' annotated_rhs +static void * +_tmp_21_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '=' annotated_rhs + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + Token * _literal; + expr_ty d; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (d = annotated_rhs_rule(p)) // annotated_rhs + ) + { + D(fprintf(stderr, "%*c+ _tmp_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + _res = d; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_21[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_22: (star_targets '=') +static asdl_seq * +_loop1_22_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (star_targets '=') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_138_var; + while ( + (_tmp_138_var = _tmp_138_rule(p)) // star_targets '=' + ) + { + _res = _tmp_138_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_22[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_23: yield_expr | star_expressions +static void * +_tmp_23_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_24: yield_expr | star_expressions +static void * +_tmp_24_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_26: ',' NAME +static asdl_seq * +_loop0_26_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = _PyPegen_name_token(p)) // NAME + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_26[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_25: NAME _loop0_26 +static asdl_seq * +_gather_25_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // NAME _loop0_26 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = _PyPegen_name_token(p)) // NAME + && + (seq = _loop0_26_rule(p)) // _loop0_26 + ) + { + D(fprintf(stderr, "%*c+ _gather_25[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_25[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_26")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_28: ',' NAME +static asdl_seq * +_loop0_28_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = _PyPegen_name_token(p)) // NAME + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_27: NAME _loop0_28 +static asdl_seq * +_gather_27_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // NAME _loop0_28 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = _PyPegen_name_token(p)) // NAME + && + (seq = _loop0_28_rule(p)) // _loop0_28 + ) + { + D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_28")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_29: ',' expression +static void * +_tmp_29_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty z; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (z = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_30: ('.' | '...') +static asdl_seq * +_loop0_30_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('.' | '...') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_139_var; + while ( + (_tmp_139_var = _tmp_139_rule(p)) // '.' | '...' 
+ ) + { + _res = _tmp_139_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_31: ('.' | '...') +static asdl_seq * +_loop1_31_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('.' | '...') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_140_var; + while ( + (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' + ) + { + _res = _tmp_140_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_31[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' 
| '...')")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_33: ',' import_from_as_name +static asdl_seq * +_loop0_33_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' import_from_as_name + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); + Token * _literal; + alias_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = import_from_as_name_rule(p)) // import_from_as_name + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_33[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_32: import_from_as_name _loop0_33 +static asdl_seq * +_gather_32_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // import_from_as_name _loop0_33 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); + alias_ty elem; + asdl_seq * seq; + if ( + (elem = import_from_as_name_rule(p)) // import_from_as_name + && + (seq = _loop0_33_rule(p)) // _loop0_33 + ) + { + D(fprintf(stderr, "%*c+ _gather_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_32[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_name _loop0_33")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_34: 'as' NAME +static void * +_tmp_34_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + && + (z = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_36: ',' dotted_as_name +static asdl_seq * +_loop0_36_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' dotted_as_name + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); + Token * _literal; + alias_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = dotted_as_name_rule(p)) // dotted_as_name + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' dotted_as_name")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_35: dotted_as_name _loop0_36 +static asdl_seq * +_gather_35_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // dotted_as_name _loop0_36 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); + alias_ty elem; + asdl_seq * seq; + if ( + (elem = dotted_as_name_rule(p)) // dotted_as_name + && + (seq = _loop0_36_rule(p)) // _loop0_36 + ) + { + D(fprintf(stderr, "%*c+ _gather_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_35[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_36")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_37: 'as' NAME +static void * +_tmp_37_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + && + (z = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_37[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_37[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_39: ',' with_item +static asdl_seq * +_loop0_39_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' with_item + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + Token * _literal; + withitem_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = with_item_rule(p)) // with_item + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_38: with_item _loop0_39 +static asdl_seq * +_gather_38_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // with_item _loop0_39 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); + withitem_ty elem; + asdl_seq * seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_39_rule(p)) // _loop0_39 + ) + { + D(fprintf(stderr, "%*c+ _gather_38[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_38[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_39")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_41: ',' with_item +static asdl_seq * +_loop0_41_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' with_item + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + Token * _literal; + withitem_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = with_item_rule(p)) // with_item + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_40: with_item _loop0_41 +static asdl_seq * +_gather_40_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // with_item _loop0_41 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); + withitem_ty elem; + asdl_seq * seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_41_rule(p)) // _loop0_41 + ) + { + D(fprintf(stderr, "%*c+ _gather_40[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_40[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_41")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_43: ',' with_item +static asdl_seq * +_loop0_43_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' with_item + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + Token * _literal; + withitem_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = with_item_rule(p)) // with_item + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_43[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_42: with_item _loop0_43 +static asdl_seq * +_gather_42_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // with_item _loop0_43 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); + withitem_ty elem; + asdl_seq * seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_43_rule(p)) // _loop0_43 + ) + { + D(fprintf(stderr, "%*c+ _gather_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_42[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_43")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_45: ',' with_item +static asdl_seq * +_loop0_45_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' with_item + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + Token * _literal; + withitem_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = with_item_rule(p)) // with_item + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_44: with_item _loop0_45 +static asdl_seq * +_gather_44_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // with_item _loop0_45 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); + withitem_ty elem; + asdl_seq * seq; + if ( + (elem = with_item_rule(p)) // with_item + && + (seq = _loop0_45_rule(p)) // _loop0_45 + ) + { + D(fprintf(stderr, "%*c+ _gather_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_44[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_45")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_46: 'as' target +static void * +_tmp_46_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' target")); + Token * _keyword; + expr_ty t; + if ( + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + && + (t = target_rule(p)) // target + ) + { + D(fprintf(stderr, "%*c+ _tmp_46[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' target")); + _res = t; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_46[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' target")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_47: except_block +static asdl_seq * +_loop1_47_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // except_block + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + excepthandler_ty except_block_var; + while ( + (except_block_var = except_block_rule(p)) // except_block + ) + { + _res = except_block_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_48: 'as' NAME +static void * +_tmp_48_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'as' NAME + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + && + (z = _PyPegen_name_token(p)) // NAME + ) + { + D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_49: 'from' expression +static void * +_tmp_49_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'from' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 514)) // token='from' + && + (z = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_50: '->' expression +static void * +_tmp_50_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '->' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + Token * _literal; + expr_ty z; + if ( + (_literal = _PyPegen_expect_token(p, 51)) // token='->' + && + (z = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_50[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'->' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_51: '->' expression +static void * +_tmp_51_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '->' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + Token * _literal; + expr_ty z; + if ( + (_literal = _PyPegen_expect_token(p, 51)) // token='->' + && + (z = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_51[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_52: NEWLINE INDENT +static void * +_tmp_52_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // NEWLINE INDENT + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + Token * indent_var; + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + && + (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' + ) + { + D(fprintf(stderr, "%*c+ _tmp_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + _res = _PyPegen_dummy_name(p, newline_var, indent_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_52[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_53: param_no_default +static asdl_seq * +_loop0_53_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_54: param_with_default +static asdl_seq * +_loop0_54_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_55: param_with_default +static asdl_seq * +_loop0_55_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_56: param_no_default +static asdl_seq * +_loop1_56_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_57: param_with_default +static asdl_seq * +_loop0_57_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_58: param_with_default +static asdl_seq * +_loop1_58_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_58[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_59: param_no_default +static asdl_seq * +_loop1_59_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_60: param_no_default +static asdl_seq * +_loop1_60_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_61: param_no_default +static asdl_seq * +_loop0_61_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_61[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_62: param_with_default +static asdl_seq * +_loop1_62_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_63: param_no_default +static asdl_seq * +_loop0_63_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_63[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_64: param_with_default +static asdl_seq * +_loop1_64_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_65: param_maybe_default +static asdl_seq * +_loop0_65_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_maybe_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + NameDefaultPair* param_maybe_default_var; + while ( + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default + ) + { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_65[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_66: param_maybe_default +static asdl_seq * +_loop1_66_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_maybe_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + NameDefaultPair* param_maybe_default_var; + while ( + (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default + ) + { + _res = param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_66[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_67: ('@' named_expression NEWLINE) +static asdl_seq * +_loop1_67_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('@' named_expression NEWLINE) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); + void *_tmp_141_var; + while ( + (_tmp_141_var = _tmp_141_rule(p)) // '@' named_expression NEWLINE + ) + { + _res = _tmp_141_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_67[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('@' named_expression NEWLINE)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_68: '(' arguments? ')' +static void * +_tmp_68_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '(' arguments? ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + Token * _literal; + Token * _literal_1; + void *z; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (z = arguments_rule(p), 1) // arguments? + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_70: ',' star_expression +static asdl_seq * +_loop0_70_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' star_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = star_expression_rule(p)) // star_expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_70[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_69: star_expression _loop0_70 +static asdl_seq * +_gather_69_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // star_expression _loop0_70 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = star_expression_rule(p)) // star_expression + && + (seq = _loop0_70_rule(p)) // _loop0_70 + ) + { + D(fprintf(stderr, "%*c+ _gather_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_69[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression _loop0_70")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_71: (',' star_expression) +static asdl_seq * +_loop1_71_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (',' star_expression) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); + void *_tmp_142_var; + while ( + (_tmp_142_var = _tmp_142_rule(p)) // ',' star_expression + ) + { + _res = _tmp_142_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_71[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_expression)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_73: ',' star_named_expression +static asdl_seq * +_loop0_73_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' star_named_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = star_named_expression_rule(p)) // star_named_expression + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_named_expression")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_72: star_named_expression _loop0_73 +static asdl_seq * +_gather_72_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // star_named_expression _loop0_73 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = star_named_expression_rule(p)) // star_named_expression + && + (seq = _loop0_73_rule(p)) // _loop0_73 + ) + { + D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression _loop0_73")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_74: (',' expression) +static asdl_seq * +_loop1_74_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (',' expression) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); + void *_tmp_143_var; + while ( + (_tmp_143_var = _tmp_143_rule(p)) // ',' expression + ) + { + _res = _tmp_143_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_74[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_75: lambda_param_no_default +static asdl_seq * +_loop0_75_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_76: lambda_param_with_default +static asdl_seq * +_loop0_76_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_77: lambda_param_with_default +static asdl_seq * +_loop0_77_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_78: lambda_param_no_default +static asdl_seq * +_loop1_78_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_78[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_79: lambda_param_with_default +static asdl_seq * +_loop0_79_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ', + 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_80: lambda_param_with_default +static asdl_seq * +_loop1_80_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_81: lambda_param_no_default +static asdl_seq * +_loop1_81_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_82: lambda_param_no_default +static asdl_seq * +_loop1_82_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_83: lambda_param_no_default +static asdl_seq * +_loop0_83_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_84: lambda_param_with_default +static asdl_seq * +_loop1_84_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_85: lambda_param_no_default +static asdl_seq * +_loop0_85_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_85[%d-%d]: %s failed!\n", p->level, ' ', + 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_86: lambda_param_with_default +static asdl_seq * +_loop1_86_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_87: lambda_param_maybe_default +static asdl_seq * +_loop0_87_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_maybe_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + NameDefaultPair* lambda_param_maybe_default_var; + while ( + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default + ) + { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, 
"%*c%s _loop0_87[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_88: lambda_param_maybe_default +static asdl_seq * +_loop1_88_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_maybe_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + NameDefaultPair* lambda_param_maybe_default_var; + while ( + (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default + ) + { + _res = lambda_param_maybe_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_88[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_89: ('or' conjunction) +static asdl_seq * +_loop1_89_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('or' conjunction) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); + void *_tmp_144_var; + while ( + (_tmp_144_var = _tmp_144_rule(p)) // 'or' conjunction + ) + { + _res = _tmp_144_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_89[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_90: ('and' inversion) +static asdl_seq * +_loop1_90_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('and' inversion) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); + void *_tmp_145_var; + while ( + (_tmp_145_var = _tmp_145_rule(p)) // 'and' inversion + ) + { + _res = _tmp_145_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_90[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('and' inversion)")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + D(p->level--); + return _seq; +} + +// _loop1_91: compare_op_bitwise_or_pair +static asdl_seq * +_loop1_91_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // compare_op_bitwise_or_pair + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); + CmpopExprPair* compare_op_bitwise_or_pair_var; + while ( + (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair + ) + { + _res = compare_op_bitwise_or_pair_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_91[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_92: '!=' +static void * +_tmp_92_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '!=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); + Token * tok; + if ( + (tok = _PyPegen_expect_token(p, 28)) // token='!=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); + _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'!='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_94: ',' slice +static asdl_seq * +_loop0_94_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' slice + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' slice")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = slice_rule(p)) // slice + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_94[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' slice")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_93: slice _loop0_94 +static asdl_seq * +_gather_93_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // slice _loop0_94 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = slice_rule(p)) // slice + && + (seq = _loop0_94_rule(p)) // _loop0_94 + ) + { + D(fprintf(stderr, "%*c+ _gather_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_93[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice _loop0_94")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_95: ':' expression? +static void * +_tmp_95_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ':' expression? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); + Token * _literal; + void *d; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (d = expression_rule(p), 1) // expression? 
+ ) + { + D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); + _res = d; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_96: tuple | group | genexp +static void * +_tmp_96_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // tuple + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + expr_ty tuple_var; + if ( + (tuple_var = tuple_rule(p)) // tuple + ) + { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + _res = tuple_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); + } + { // group + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); + expr_ty group_var; + if ( + (group_var = group_rule(p)) // group + ) + { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); + _res = group_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group")); + } + { // genexp + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + expr_ty genexp_var; + if ( + (genexp_var = genexp_rule(p)) // genexp + ) + { + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + _res = genexp_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_97: list | listcomp +static void * +_tmp_97_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // list + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + expr_ty list_var; + if ( + (list_var = list_rule(p)) // list + ) + { + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + _res = list_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "list")); + } + { // listcomp + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); + expr_ty listcomp_var; + if ( + (listcomp_var = listcomp_rule(p)) // listcomp + ) + { + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); + _res = listcomp_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_98: dict | set | dictcomp | setcomp +static void * +_tmp_98_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // dict + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); + expr_ty dict_var; + if ( + (dict_var = dict_rule(p)) // dict + ) + { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); + _res = dict_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict")); + } + { // set + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); + expr_ty set_var; + if ( + (set_var = set_rule(p)) // set + ) + { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); + _res = set_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set")); + } + { // dictcomp + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); + expr_ty dictcomp_var; + if ( + (dictcomp_var = dictcomp_rule(p)) // dictcomp + ) + { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); + _res = dictcomp_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp")); + } + { // setcomp + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); + expr_ty setcomp_var; + if ( + (setcomp_var = setcomp_rule(p)) // setcomp + ) + { + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); + _res = setcomp_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "setcomp")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_99: STRING +static asdl_seq * +_loop1_99_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // STRING + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); + expr_ty string_var; + while ( + (string_var = _PyPegen_string_token(p)) // STRING + ) + { + _res = string_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_99[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + D(p->level--); + return _seq; +} + +// _tmp_100: star_named_expression ',' star_named_expressions? +static void * +_tmp_100_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_named_expression ',' star_named_expressions? + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + Token * _literal; + expr_ty y; + void *z; + if ( + (y = star_named_expression_rule(p)) // star_named_expression + && + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (z = star_named_expressions_rule(p), 1) // star_named_expressions? + ) + { + D(fprintf(stderr, "%*c+ _tmp_100[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + _res = _PyPegen_seq_insert_in_front ( p , y , z ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_100[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_101: yield_expr | named_expression +static void * +_tmp_101_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); + } + { // named_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); + expr_ty named_expression_var; + if ( + (named_expression_var = named_expression_rule(p)) // named_expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); + _res = named_expression_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_103: ',' double_starred_kvpair +static asdl_seq * +_loop0_103_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' double_starred_kvpair + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + Token * _literal; + KeyValuePair* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_102: double_starred_kvpair _loop0_103 +static asdl_seq * +_gather_102_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // double_starred_kvpair _loop0_103 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); + KeyValuePair* elem; + asdl_seq * seq; + if ( + (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair + && + (seq = _loop0_103_rule(p)) // _loop0_103 + ) + { + D(fprintf(stderr, "%*c+ _gather_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_102[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_103")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop1_104: for_if_clause +static asdl_seq * +_loop1_104_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // for_if_clause + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); + comprehension_ty for_if_clause_var; + while ( + (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause + ) + { + _res = for_if_clause_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_105: ('if' disjunction) +static asdl_seq * +_loop0_105_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('if' disjunction) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_146_var; + while ( + (_tmp_146_var = _tmp_146_rule(p)) // 'if' disjunction + ) + { + _res = _tmp_146_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_105[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + D(p->level--); + return _seq; +} -/* For a description, see the comments at end of this file */ +// _loop0_106: ('if' disjunction) +static asdl_seq * +_loop0_106_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ('if' disjunction) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_147_var; + while ( + (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction + ) + { + _res = _tmp_147_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + D(p->level--); + return _seq; +} -/* XXX To do: error recovery */ +// _tmp_107: ',' args +static void * +_tmp_107_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' args + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); + Token * _literal; + expr_ty c; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (c = args_rule(p)) // args + ) + { + D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' args")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -#include "Python.h" -#include "token.h" -#include "grammar.h" -#include "node.h" -#include "parser.h" -#include "errcode.h" -#include "graminit.h" +// _tmp_108: ',' args +static void * +_tmp_108_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' args + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); + Token * _literal; + expr_ty c; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (c = args_rule(p)) // args + ) + { + D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' args")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} +// _loop0_110: ',' kwarg_or_starred +static asdl_seq * +_loop0_110_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' kwarg_or_starred + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_110[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + D(p->level--); + return _seq; +} -#ifdef Py_DEBUG -extern int Py_DebugFlag; -#define D(x) if (!Py_DebugFlag); else x -#else -#define D(x) -#endif +// _gather_109: kwarg_or_starred _loop0_110 +static asdl_seq * +_gather_109_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_starred _loop0_110 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + && + (seq = _loop0_110_rule(p)) // _loop0_110 + ) + { + D(fprintf(stderr, "%*c+ _gather_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_109[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_110")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_112: ',' kwarg_or_double_starred +static asdl_seq * +_loop0_112_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' kwarg_or_double_starred + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_111: kwarg_or_double_starred _loop0_112 +static asdl_seq * +_gather_111_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_double_starred _loop0_112 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + && + (seq = _loop0_112_rule(p)) // _loop0_112 + ) + { + D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_114: ',' kwarg_or_starred +static asdl_seq * +_loop0_114_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' kwarg_or_starred + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_113: kwarg_or_starred _loop0_114 +static asdl_seq * +_gather_113_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_starred _loop0_114 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred + && + (seq = _loop0_114_rule(p)) // _loop0_114 + ) + { + D(fprintf(stderr, "%*c+ _gather_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_113[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_114")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_116: ',' kwarg_or_double_starred +static asdl_seq * +_loop0_116_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' kwarg_or_double_starred + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + Token * _literal; + KeywordOrStarred* elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_115: kwarg_or_double_starred _loop0_116 +static asdl_seq * +_gather_115_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // kwarg_or_double_starred _loop0_116 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + KeywordOrStarred* elem; + asdl_seq * seq; + if ( + (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred + && + (seq = _loop0_116_rule(p)) // _loop0_116 + ) + { + D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_117: (',' star_target) +static asdl_seq * +_loop0_117_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (',' star_target) + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_148_var; + while ( + (_tmp_148_var = _tmp_148_rule(p)) // ',' star_target + ) + { + _res = _tmp_148_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_117[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + D(p->level--); + return _seq; +} + +// _loop0_119: ',' star_target +static asdl_seq * +_loop0_119_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' star_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = star_target_rule(p)) // star_target + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_118: star_target _loop0_119 +static asdl_seq * +_gather_118_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // star_target _loop0_119 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = star_target_rule(p)) // star_target + && + (seq = _loop0_119_rule(p)) // _loop0_119 + ) + { + D(fprintf(stderr, "%*c+ _gather_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_118[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_119")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _tmp_120: !'*' star_target +static void * +_tmp_120_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // !'*' star_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + expr_ty star_target_var; + if ( + _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' + && + (star_target_var = star_target_rule(p)) // star_target + ) + { + D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + _res = star_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "!'*' star_target")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_122: ',' del_target +static asdl_seq * +_loop0_122_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' del_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = del_target_rule(p)) // del_target + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_121: del_target _loop0_122 +static asdl_seq * +_gather_121_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // del_target _loop0_122 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = del_target_rule(p)) // del_target + && + (seq = _loop0_122_rule(p)) // _loop0_122 + ) + { + D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "del_target _loop0_122")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_124: ',' target +static asdl_seq * +_loop0_124_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // ',' target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' target")); + Token * _literal; + expr_ty elem; + while ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (elem = target_rule(p)) // target + ) + { + _res = elem; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + PyMem_Free(_children); + D(p->level--); + return NULL; + } + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_124[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' target")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + D(p->level--); + return _seq; +} + +// _gather_123: target _loop0_124 +static asdl_seq * +_gather_123_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + asdl_seq * _res = NULL; + int _mark = p->mark; + { // target _loop0_124 + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _gather_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); + expr_ty elem; + asdl_seq * seq; + if ( + (elem = target_rule(p)) // target + && + (seq = _loop0_124_rule(p)) // _loop0_124 + ) + { + D(fprintf(stderr, "%*c+ _gather_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); + _res = _PyPegen_seq_insert_in_front(p, elem, seq); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _gather_123[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "target _loop0_124")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} +// _tmp_125: args | expression for_if_clauses +static void * +_tmp_125_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // args + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); + expr_ty args_var; + if ( + (args_var = args_rule(p)) // args + ) + { + D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); + _res = args_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); + } + { // expression for_if_clauses + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + expr_ty expression_var; + asdl_seq* for_if_clauses_var; + if ( + (expression_var = expression_rule(p)) // expression + && + (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses + ) + { + D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -/* STACK DATA TYPE */ +// _loop0_126: star_named_expressions +static asdl_seq * +_loop0_126_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // star_named_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + asdl_seq* star_named_expressions_var; + while ( + (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions + ) + { + _res = star_named_expressions_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_126[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expressions")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + D(p->level--); + return _seq; +} -static void s_reset(stack *); +// _tmp_127: '=' annotated_rhs +static void * +_tmp_127_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '=' annotated_rhs + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + Token * _literal; + expr_ty annotated_rhs_var; + if ( + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + && + (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs + ) + { + D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -static void -s_reset(stack *s) +// _loop0_128: (star_targets '=') +static asdl_seq * +_loop0_128_rule(Parser *p) { - s->s_top = &s->s_base[MAXSTACK]; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (star_targets '=') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_149_var; + while ( + (_tmp_149_var = _tmp_149_rule(p)) // star_targets '=' + ) + { + _res = _tmp_149_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_128_type, _seq); + D(p->level--); + return _seq; } -#define s_empty(s) ((s)->s_top == &(s)->s_base[MAXSTACK]) - -static int -s_push(stack *s, const dfa *d, node *parent) +// _loop0_129: (star_targets '=') +static asdl_seq * +_loop0_129_rule(Parser *p) { - stackentry *top; - if (s->s_top == s->s_base) { - fprintf(stderr, "s_push: parser stack overflow\n"); - return E_NOMEM; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // (star_targets '=') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_150_var; + while ( + (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' + ) + { + _res = _tmp_150_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } - top = --s->s_top; - top->s_dfa = d; - top->s_parent = parent; - top->s_state = 0; - return 0; + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); + D(p->level--); + return _seq; } -#ifdef Py_DEBUG - -static void -s_pop(stack *s) +// _tmp_130: yield_expr | star_expressions +static void * +_tmp_130_rule(Parser *p) { - if (s_empty(s)) { - Py_FatalError("parser stack underflow"); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // yield_expr + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + expr_ty yield_expr_var; + if ( + (yield_expr_var = yield_expr_rule(p)) // yield_expr + ) + { + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + _res = yield_expr_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); + } + { // star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + expr_ty star_expressions_var; + if ( + (star_expressions_var = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + _res = star_expressions_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } - s->s_top++; + _res = NULL; + done: + D(p->level--); + return _res; } -#else /* !Py_DEBUG */ - -#define s_pop(s) (s)->s_top++ - -#endif - - -/* PARSER CREATION */ - -parser_state * -PyParser_New(grammar *g, int start) +// _tmp_131: '[' | '(' | '{' +static void * +_tmp_131_rule(Parser *p) { - parser_state *ps; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '[' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 9)) // token='[' + ) + { + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); + } + { // '(' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + ) + { + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); + } + { // '{' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 25)) // token='{' + ) + { + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} - if (!g->g_accel) - PyGrammar_AddAccelerators(g); - ps = (parser_state *)PyMem_MALLOC(sizeof(parser_state)); - if (ps == NULL) +// _loop0_132: param_no_default +static asdl_seq * +_loop0_132_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); return NULL; - ps->p_grammar = g; -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD - ps->p_flags = 0; -#endif - ps->p_tree = PyNode_New(start); - if (ps->p_tree == NULL) { - PyMem_FREE(ps); + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + arg_ty param_no_default_var; + while ( + (param_no_default_var = param_no_default_rule(p)) // param_no_default + ) + { + _res = param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); return NULL; } - s_reset(&ps->p_stack); - (void) s_push(&ps->p_stack, PyGrammar_FindDFA(g, start), ps->p_tree); - return ps; + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_132_type, _seq); + D(p->level--); + return _seq; } -void -PyParser_Delete(parser_state *ps) +// _tmp_133: slash_with_default | param_with_default+ +static void * +_tmp_133_rule(Parser *p) { - /* NB If you want to save the parse tree, - you must set p_tree to NULL before calling delparser! */ - PyNode_Free(ps->p_tree); - PyMem_FREE(ps); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // slash_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + SlashWithDefault* slash_with_default_var; + if ( + (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default + ) + { + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + _res = slash_with_default_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_with_default")); + } + { // param_with_default+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + asdl_seq * _loop1_151_var; + if ( + (_loop1_151_var = _loop1_151_rule(p)) // param_with_default+ + ) + { + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + _res = _loop1_151_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+")); + } + _res = NULL; + done: + D(p->level--); + return _res; } +// _loop0_134: lambda_param_no_default +static asdl_seq * +_loop0_134_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_no_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + arg_ty lambda_param_no_default_var; + while ( + (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default + ) + { + _res = lambda_param_no_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop0_134_type, _seq); + D(p->level--); + return _seq; +} -/* PARSER STACK OPERATIONS */ +// _tmp_135: lambda_slash_with_default | lambda_param_with_default+ +static void * +_tmp_135_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // lambda_slash_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + SlashWithDefault* lambda_slash_with_default_var; + if ( + (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default + ) + { + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + _res = lambda_slash_with_default_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_slash_with_default")); + } + { // lambda_param_with_default+ + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + asdl_seq * _loop1_152_var; + if ( + (_loop1_152_var = _loop1_152_rule(p)) // lambda_param_with_default+ + ) + { + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + _res = _loop1_152_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default+")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -static int -shift(stack *s, int type, char *str, int newstate, int lineno, int col_offset, - int end_lineno, int end_col_offset) +// _tmp_136: ')' | ',' (')' | '**') +static void * +_tmp_136_rule(Parser *p) { - int err; - assert(!s_empty(s)); - err = PyNode_AddChild(s->s_top->s_parent, type, str, lineno, col_offset, - end_lineno, end_col_offset); - if (err) - return err; - s->s_top->s_state = newstate; - return 0; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + { // ',' (')' | '**') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + Token * _literal; + void *_tmp_153_var; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_tmp_153_var = _tmp_153_rule(p)) // ')' | '**' + ) + { + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_153_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' (')' | '**')")); + } + _res = NULL; + done: + D(p->level--); + return _res; } -static int -push(stack *s, int type, const dfa *d, int newstate, int lineno, int col_offset, - int end_lineno, int end_col_offset) +// _tmp_137: ':' | ',' (':' | '**') +static void * +_tmp_137_rule(Parser *p) { - int err; - node *n; - n = s->s_top->s_parent; - assert(!s_empty(s)); - err = PyNode_AddChild(n, type, (char *)NULL, lineno, col_offset, - end_lineno, end_col_offset); - if (err) - return err; - s->s_top->s_state = newstate; - return s_push(s, d, CHILD(n, NCH(n)-1)); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // ',' (':' | '**') + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + Token * _literal; + void *_tmp_154_var; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (_tmp_154_var = _tmp_154_rule(p)) // ':' | '**' + ) + { + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); + } + _res = NULL; + done: + D(p->level--); + return _res; } +// _tmp_138: star_targets '=' +static void * +_tmp_138_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_targets '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + Token * _literal; + expr_ty z; + if ( + (z = star_targets_rule(p)) // star_targets + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -/* PARSER PROPER */ +// _tmp_139: '.' | '...' +static void * +_tmp_139_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '.' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 23)) // token='.' 
+ ) + { + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); + } + { // '...' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 52)) // token='...' + ) + { + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -static int -classify(parser_state *ps, int type, const char *str) +// _tmp_140: '.' | '...' +static void * +_tmp_140_rule(Parser *p) { - grammar *g = ps->p_grammar; - int n = g->g_ll.ll_nlabels; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '.' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 23)) // token='.' + ) + { + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); + } + { // '...' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 52)) // token='...' + ) + { + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} - if (type == NAME) { - const label *l = g->g_ll.ll_label; - int i; - for (i = n; i > 0; i--, l++) { - if (l->lb_type != NAME || l->lb_str == NULL || - l->lb_str[0] != str[0] || - strcmp(l->lb_str, str) != 0) - continue; -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD -#if 0 - /* Leaving this in as an example */ - if (!(ps->p_flags & CO_FUTURE_WITH_STATEMENT)) { - if (str[0] == 'w' && strcmp(str, "with") == 0) - break; /* not a keyword yet */ - else if (str[0] == 'a' && strcmp(str, "as") == 0) - break; /* not a keyword yet */ +// _tmp_141: '@' named_expression NEWLINE +static void * +_tmp_141_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '@' named_expression NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + Token * _literal; + expr_ty f; + Token * newline_var; + if ( + (_literal = _PyPegen_expect_token(p, 49)) // token='@' + && + (f = named_expression_rule(p)) // named_expression + && + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + _res = f; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } -#endif -#endif - D(printf("It's a keyword\n")); - return n - i; + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } + _res = NULL; + done: + D(p->level--); + return _res; +} - { - const label *l = g->g_ll.ll_label; - int i; - for (i = n; i > 0; i--, l++) { - if (l->lb_type == type && l->lb_str == NULL) { - D(printf("It's a token we know\n")); - return n - i; +// _tmp_142: ',' star_expression +static void * +_tmp_142_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' star_expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + Token * _literal; + expr_ty c; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (c = star_expression_rule(p)) // star_expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } - - D(printf("Illegal token\n")); - return -1; + _res = NULL; + done: + D(p->level--); + return _res; } -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD -#if 0 -/* Leaving this in as an example */ -static void -future_hack(parser_state *ps) +// _tmp_143: ',' expression +static void * +_tmp_143_rule(Parser *p) { - node *n = ps->p_stack.s_top->s_parent; - node *ch, *cch; - int i; - - /* from __future__ import ..., must have at least 4 children */ - n = CHILD(n, 0); - if (NCH(n) < 4) - return; - ch = CHILD(n, 0); - if (STR(ch) == NULL || strcmp(STR(ch), "from") != 0) - return; - ch = CHILD(n, 1); - if (NCH(ch) == 1 && STR(CHILD(ch, 0)) && - strcmp(STR(CHILD(ch, 0)), "__future__") != 0) - return; - ch = CHILD(n, 3); - /* ch can be a star, a parenthesis or import_as_names */ - if (TYPE(ch) == STAR) - return; - if (TYPE(ch) == LPAR) - ch = CHILD(n, 4); - - for (i = 0; i < NCH(ch); i += 2) { - cch = CHILD(ch, i); - if (NCH(cch) >= 1 && TYPE(CHILD(cch, 0)) == NAME) { - char *str_ch = STR(CHILD(cch, 0)); - if (strcmp(str_ch, FUTURE_WITH_STATEMENT) == 0) { - ps->p_flags |= CO_FUTURE_WITH_STATEMENT; - } else if (strcmp(str_ch, FUTURE_PRINT_FUNCTION) == 0) { - ps->p_flags |= CO_FUTURE_PRINT_FUNCTION; - } else if (strcmp(str_ch, FUTURE_UNICODE_LITERALS) == 0) { - ps->p_flags |= CO_FUTURE_UNICODE_LITERALS; + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ',' expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + Token * _literal; + expr_ty c; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (c = expression_rule(p)) // expression + ) + { + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } + _res = NULL; + done: + D(p->level--); + return _res; } -#endif -#endif /* future keyword */ - -int -PyParser_AddToken(parser_state *ps, int type, char *str, - int lineno, int col_offset, - int end_lineno, int end_col_offset, - int *expected_ret) -{ - int ilabel; - int err; - - D(printf("Token %s/'%s' ... 
", _PyParser_TokenNames[type], str)); - - /* Find out which label this token is */ - ilabel = classify(ps, type, str); - if (ilabel < 0) - return E_SYNTAX; - - /* Loop until the token is shifted or an error occurred */ - for (;;) { - /* Fetch the current dfa and state */ - const dfa *d = ps->p_stack.s_top->s_dfa; - state *s = &d->d_state[ps->p_stack.s_top->s_state]; - - D(printf(" DFA '%s', state %d:", - d->d_name, ps->p_stack.s_top->s_state)); - - /* Check accelerator */ - if (s->s_lower <= ilabel && ilabel < s->s_upper) { - int x = s->s_accel[ilabel - s->s_lower]; - if (x != -1) { - if (x & (1<<7)) { - /* Push non-terminal */ - int nt = (x >> 8) + NT_OFFSET; - int arrow = x & ((1<<7)-1); - if (nt == func_body_suite && !(ps->p_flags & PyCF_TYPE_COMMENTS)) { - /* When parsing type comments is not requested, - we can provide better errors about bad indentation - by using 'suite' for the body of a funcdef */ - D(printf(" [switch func_body_suite to suite]")); - nt = suite; - } - const dfa *d1 = PyGrammar_FindDFA( - ps->p_grammar, nt); - if ((err = push(&ps->p_stack, nt, d1, - arrow, lineno, col_offset, - end_lineno, end_col_offset)) > 0) { - D(printf(" MemError: push\n")); - return err; - } - D(printf(" Push '%s'\n", d1->d_name)); - continue; - } - - /* Shift the token */ - if ((err = shift(&ps->p_stack, type, str, - x, lineno, col_offset, - end_lineno, end_col_offset)) > 0) { - D(printf(" MemError: shift.\n")); - return err; - } - D(printf(" Shift.\n")); - /* Pop while we are in an accept-only state */ - while (s = &d->d_state - [ps->p_stack.s_top->s_state], - s->s_accept && s->s_narcs == 1) { - D(printf(" DFA '%s', state %d: " - "Direct pop.\n", - d->d_name, - ps->p_stack.s_top->s_state)); -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD -#if 0 - if (d->d_name[0] == 'i' && - strcmp(d->d_name, - "import_stmt") == 0) - future_hack(ps); -#endif -#endif - s_pop(&ps->p_stack); - if (s_empty(&ps->p_stack)) { - D(printf(" ACCEPT.\n")); - return E_DONE; - } - d = ps->p_stack.s_top->s_dfa; - } - return E_OK; + +// _tmp_144: 'or' conjunction +static void * +_tmp_144_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'or' conjunction + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + Token * _keyword; + expr_ty c; + if ( + (_keyword = _PyPegen_expect_token(p, 531)) // token='or' + && + (c = conjunction_rule(p)) // conjunction + ) + { + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} - if (s->s_accept) { -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD -#if 0 - if (d->d_name[0] == 'i' && - strcmp(d->d_name, "import_stmt") == 0) - future_hack(ps); -#endif -#endif - /* Pop this dfa and try again */ - s_pop(&ps->p_stack); - D(printf(" Pop ...\n")); - if (s_empty(&ps->p_stack)) { - D(printf(" Error: bottom of stack.\n")); - return E_SYNTAX; +// _tmp_145: 'and' inversion +static void * +_tmp_145_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'and' inversion + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + Token * _keyword; + expr_ty c; + if ( + (_keyword = _PyPegen_expect_token(p, 532)) // token='and' + && + (c = inversion_rule(p)) // inversion + ) + { + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } - continue; + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} - /* Stuck, report syntax error */ - D(printf(" Error.\n")); - if (expected_ret) { - if (s->s_lower == s->s_upper - 1) { - /* Only one possible expected token */ - *expected_ret = ps->p_grammar-> - g_ll.ll_label[s->s_lower].lb_type; +// _tmp_146: 'if' disjunction +static void * +_tmp_146_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'if' disjunction + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' + && + (z = disjunction_rule(p)) // disjunction + ) + { + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } - else - *expected_ret = -1; + goto done; } - return E_SYNTAX; + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } + _res = NULL; + done: + D(p->level--); + return _res; } - -#ifdef Py_DEBUG - -/* DEBUG OUTPUT */ - -void -dumptree(grammar *g, node *n) +// _tmp_147: 'if' disjunction +static void * +_tmp_147_rule(Parser *p) { - int i; - - if (n == NULL) - printf("NIL"); - else { - label l; - l.lb_type = TYPE(n); - l.lb_str = STR(n); - printf("%s", PyGrammar_LabelRepr(&l)); - if (ISNONTERMINAL(TYPE(n))) { - printf("("); - for (i = 0; i < NCH(n); i++) { - if (i > 0) - printf(","); - dumptree(g, CHILD(n, i)); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'if' disjunction + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + Token * _keyword; + expr_ty z; + if ( + (_keyword = _PyPegen_expect_token(p, 510)) // token='if' + && + (z = disjunction_rule(p)) // disjunction + ) + { + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + _res = z; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; } - printf(")"); + goto done; } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } + _res = NULL; + done: + D(p->level--); + return _res; } -void -showtree(grammar *g, node *n) +// _tmp_148: ',' star_target +static void * +_tmp_148_rule(Parser *p) { - int i; - - if (n == NULL) - return; - if (ISNONTERMINAL(TYPE(n))) { - for (i = 0; i < NCH(n); i++) - showtree(g, CHILD(n, i)); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; } - else if (ISTERMINAL(TYPE(n))) { - printf("%s", _PyParser_TokenNames[TYPE(n)]); - if (TYPE(n) == NUMBER || TYPE(n) == NAME) - printf("(%s)", STR(n)); - printf(" "); + void * _res = NULL; + int _mark = p->mark; + { // ',' star_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + Token * _literal; + expr_ty c; + if ( + (_literal = _PyPegen_expect_token(p, 12)) // token=',' + && + (c = star_target_rule(p)) // star_target + ) + { + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + _res = c; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); } - else - printf("? 
"); + _res = NULL; + done: + D(p->level--); + return _res; } -void -printtree(parser_state *ps) +// _tmp_149: star_targets '=' +static void * +_tmp_149_rule(Parser *p) { - if (Py_DebugFlag) { - printf("Parse tree:\n"); - dumptree(ps->p_grammar, ps->p_tree); - printf("\n"); - printf("Tokens:\n"); - showtree(ps->p_grammar, ps->p_tree); - printf("\n"); + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_targets '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + Token * _literal; + expr_ty star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + _res = _PyPegen_dummy_name(p, star_targets_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } - printf("Listing:\n"); - PyNode_ListTree(ps->p_tree); - printf("\n"); + _res = NULL; + done: + D(p->level--); + return _res; } -#endif /* Py_DEBUG */ - -/* - -Description ------------ - -The parser's interface is different than usual: the function addtoken() -must be called for each token in the input. This makes it possible to -turn it into an incremental parsing system later. The parsing system -constructs a parse tree as it goes. - -A parsing rule is represented as a Deterministic Finite-state Automaton -(DFA). A node in a DFA represents a state of the parser; an arc represents -a transition. Transitions are either labeled with terminal symbols or -with non-terminals. When the parser decides to follow an arc labeled -with a non-terminal, it is invoked recursively with the DFA representing -the parsing rule for that as its initial state; when that DFA accepts, -the parser that invoked it continues. The parse tree constructed by the -recursively called parser is inserted as a child in the current parse tree. +// _tmp_150: star_targets '=' +static void * +_tmp_150_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // star_targets '=' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + Token * _literal; + expr_ty star_targets_var; + if ( + (star_targets_var = star_targets_rule(p)) // star_targets + && + (_literal = _PyPegen_expect_token(p, 22)) // token='=' + ) + { + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + _res = _PyPegen_dummy_name(p, star_targets_var, _literal); + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -The DFA's can be constructed automatically from a more conventional -language description. An extended LL(1) grammar (ELL(1)) is suitable. 
-Certain restrictions make the parser's life easier: rules that can produce -the empty string should be outlawed (there are other ways to put loops -or optional parts in the language). To avoid the need to construct -FIRST sets, we can require that all but the last alternative of a rule -(really: arc going out of a DFA's state) must begin with a terminal -symbol. +// _loop1_151: param_with_default +static asdl_seq * +_loop1_151_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + NameDefaultPair* param_with_default_var; + while ( + (param_with_default_var = param_with_default_rule(p)) // param_with_default + ) + { + _res = param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_151[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_151_type, _seq); + D(p->level--); + return _seq; +} -As an example, consider this grammar: +// _loop1_152: lambda_param_with_default +static asdl_seq * +_loop1_152_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void *_res = NULL; + int _mark = p->mark; + int _start_mark = p->mark; + void **_children = PyMem_Malloc(sizeof(void *)); + if (!_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + ssize_t _children_capacity = 1; + ssize_t _n = 0; + { // lambda_param_with_default + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + NameDefaultPair* lambda_param_with_default_var; + while ( + (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default + ) + { + _res = lambda_param_with_default_var; + if (_n == _children_capacity) { + _children_capacity *= 2; + void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); + if (!_new_children) { + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + _children = _new_children; + } + _children[_n++] = _res; + _mark = p->mark; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); + } + if (_n == 0 || p->error_indicator) { + PyMem_Free(_children); + D(p->level--); + return NULL; + } + asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); + if (!_seq) { + PyMem_Free(_children); + p->error_indicator = 1; + PyErr_NoMemory(); + D(p->level--); + return NULL; + } + for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); + PyMem_Free(_children); + _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); + D(p->level--); + return _seq; +} -expr: term (OP term)* -term: CONSTANT | '(' expr ')' +// _tmp_153: ')' | '**' +static void * +_tmp_153_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + { // '**' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + ) + { + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -The DFA corresponding to the rule for expr is: +// _tmp_154: ':' | '**' +static void * +_tmp_154_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); + } + { // '**' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 35)) // token='**' + ) + { + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} -------->.---term-->.-------> - ^ | - | | - \----OP----/ +void * +_PyPegen_parse(Parser *p) +{ + // Initialize keywords + p->keywords = reserved_keywords; + p->n_keyword_lists = n_keyword_lists; -The parse tree generated for the input a+b is: + // Run parser + void *result = NULL; + if (p->start_rule == Py_file_input) { + result = file_rule(p); + } else if (p->start_rule == Py_single_input) { + result = interactive_rule(p); + } else if (p->start_rule == Py_eval_input) { + result = eval_rule(p); + } else if (p->start_rule == Py_func_type_input) { + result = func_type_rule(p); + } else if (p->start_rule == Py_fstring_input) { + result = fstring_rule(p); + } -(expr: (term: (NAME: a)), (OP: +), (term: (NAME: b))) + return result; +} -*/ +// The end diff --git a/Parser/parser.h b/Parser/parser.h deleted file mode 100644 index b16075e7f29f2..0000000000000 --- a/Parser/parser.h +++ /dev/null @@ -1,49 +0,0 @@ -#ifndef Py_PARSER_H -#define Py_PARSER_H -#ifdef __cplusplus -extern "C" { -#endif - - -/* Parser interface */ - -#define MAXSTACK 1700 - -typedef struct { - int s_state; /* State in current DFA */ - const dfa *s_dfa; /* Current DFA */ - struct _node *s_parent; /* Where to add next node */ -} stackentry; - -typedef struct { - stackentry *s_top; /* Top entry */ - stackentry s_base[MAXSTACK];/* Array of stack entries */ - /* NB The stack grows down */ -} stack; - -typedef struct { - stack p_stack; /* Stack of parser states */ - grammar *p_grammar; /* Grammar to use */ - node *p_tree; /* Top of parse tree */ -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD - unsigned long p_flags; /* see co_flags in Include/code.h */ -#endif -} parser_state; - -parser_state *PyParser_New(grammar *g, int start); -void PyParser_Delete(parser_state *ps); -int PyParser_AddToken(parser_state *ps, int type, char *str, - int lineno, int col_offset, - int end_lineno, int end_col_offset, - int *expected_ret); -void PyGrammar_AddAccelerators(grammar *g); - - -#define showtree _Py_showtree -#define printtree _Py_printtree -#define dumptree _Py_dumptree - -#ifdef __cplusplus -} -#endif -#endif /* !Py_PARSER_H */ diff --git a/Parser/parsetok.c b/Parser/parsetok.c deleted file mode 100644 index 1ecb2c4a16df9..0000000000000 --- a/Parser/parsetok.c +++ /dev/null @@ -1,486 +0,0 @@ - -/* Parser-tokenizer link implementation */ - -#include "Python.h" -#include "tokenizer.h" -#include "node.h" -#include "grammar.h" -#include "parser.h" -#include "parsetok.h" -#include "errcode.h" -#include "graminit.h" - - -/* Forward */ -static node *parsetok(struct tok_state *, grammar *, int, perrdetail *, int *); -static int initerr(perrdetail *err_ret, PyObject * filename); - -typedef struct { - struct { - int lineno; - char *comment; - } *items; - size_t size; - size_t num_items; -} growable_comment_array; - -static int -growable_comment_array_init(growable_comment_array *arr, size_t initial_size) { - assert(initial_size > 0); - arr->items = malloc(initial_size * sizeof(*arr->items)); - arr->size = initial_size; - arr->num_items = 0; - - return arr->items != NULL; -} - -static int -growable_comment_array_add(growable_comment_array *arr, int lineno, char *comment) { - if (arr->num_items >= arr->size) { - size_t new_size = arr->size * 2; - void *new_items_array = realloc(arr->items, new_size * sizeof(*arr->items)); - if (!new_items_array) { - return 0; - } - arr->items = new_items_array; - arr->size = new_size; - } - - arr->items[arr->num_items].lineno = 
lineno; - arr->items[arr->num_items].comment = comment; - arr->num_items++; - return 1; -} - -static void -growable_comment_array_deallocate(growable_comment_array *arr) { - for (unsigned i = 0; i < arr->num_items; i++) { - PyObject_FREE(arr->items[i].comment); - } - free(arr->items); -} - -/* Parse input coming from a string. Return error code, print some errors. */ -node * -PyParser_ParseString(const char *s, grammar *g, int start, perrdetail *err_ret) -{ - return PyParser_ParseStringFlagsFilename(s, NULL, g, start, err_ret, 0); -} - -node * -PyParser_ParseStringFlags(const char *s, grammar *g, int start, - perrdetail *err_ret, int flags) -{ - return PyParser_ParseStringFlagsFilename(s, NULL, - g, start, err_ret, flags); -} - -node * -PyParser_ParseStringFlagsFilename(const char *s, const char *filename, - grammar *g, int start, - perrdetail *err_ret, int flags) -{ - int iflags = flags; - return PyParser_ParseStringFlagsFilenameEx(s, filename, g, start, - err_ret, &iflags); -} - -node * -PyParser_ParseStringObject(const char *s, PyObject *filename, - grammar *g, int start, - perrdetail *err_ret, int *flags) -{ - struct tok_state *tok; - int exec_input = start == file_input; - - if (initerr(err_ret, filename) < 0) - return NULL; - - if (PySys_Audit("compile", "yO", s, err_ret->filename) < 0) { - err_ret->error = E_ERROR; - return NULL; - } - - if (*flags & PyPARSE_IGNORE_COOKIE) - tok = PyTokenizer_FromUTF8(s, exec_input); - else - tok = PyTokenizer_FromString(s, exec_input); - if (tok == NULL) { - err_ret->error = PyErr_Occurred() ? E_DECODE : E_NOMEM; - return NULL; - } - if (*flags & PyPARSE_TYPE_COMMENTS) { - tok->type_comments = 1; - } - - Py_INCREF(err_ret->filename); - tok->filename = err_ret->filename; - if (*flags & PyPARSE_ASYNC_HACKS) - tok->async_hacks = 1; - return parsetok(tok, g, start, err_ret, flags); -} - -node * -PyParser_ParseStringFlagsFilenameEx(const char *s, const char *filename_str, - grammar *g, int start, - perrdetail *err_ret, int *flags) -{ - node *n; - PyObject *filename = NULL; - if (filename_str != NULL) { - filename = PyUnicode_DecodeFSDefault(filename_str); - if (filename == NULL) { - err_ret->error = E_ERROR; - return NULL; - } - } - n = PyParser_ParseStringObject(s, filename, g, start, err_ret, flags); - Py_XDECREF(filename); - return n; -} - -/* Parse input coming from a file. Return error code, print some errors. 
*/ - -node * -PyParser_ParseFile(FILE *fp, const char *filename, grammar *g, int start, - const char *ps1, const char *ps2, - perrdetail *err_ret) -{ - return PyParser_ParseFileFlags(fp, filename, NULL, - g, start, ps1, ps2, err_ret, 0); -} - -node * -PyParser_ParseFileFlags(FILE *fp, const char *filename, const char *enc, - grammar *g, int start, - const char *ps1, const char *ps2, - perrdetail *err_ret, int flags) -{ - int iflags = flags; - return PyParser_ParseFileFlagsEx(fp, filename, enc, g, start, ps1, - ps2, err_ret, &iflags); -} - -node * -PyParser_ParseFileObject(FILE *fp, PyObject *filename, - const char *enc, grammar *g, int start, - const char *ps1, const char *ps2, - perrdetail *err_ret, int *flags) -{ - struct tok_state *tok; - - if (initerr(err_ret, filename) < 0) - return NULL; - - if (PySys_Audit("compile", "OO", Py_None, err_ret->filename) < 0) { - return NULL; - } - - if ((tok = PyTokenizer_FromFile(fp, enc, ps1, ps2)) == NULL) { - err_ret->error = E_NOMEM; - return NULL; - } - if (*flags & PyPARSE_TYPE_COMMENTS) { - tok->type_comments = 1; - } - Py_INCREF(err_ret->filename); - tok->filename = err_ret->filename; - return parsetok(tok, g, start, err_ret, flags); -} - -node * -PyParser_ParseFileFlagsEx(FILE *fp, const char *filename, - const char *enc, grammar *g, int start, - const char *ps1, const char *ps2, - perrdetail *err_ret, int *flags) -{ - node *n; - PyObject *fileobj = NULL; - if (filename != NULL) { - fileobj = PyUnicode_DecodeFSDefault(filename); - if (fileobj == NULL) { - err_ret->error = E_ERROR; - return NULL; - } - } - n = PyParser_ParseFileObject(fp, fileobj, enc, g, - start, ps1, ps2, err_ret, flags); - Py_XDECREF(fileobj); - return n; -} - -/* Parse input coming from the given tokenizer structure. - Return error code. */ - -static node * -parsetok(struct tok_state *tok, grammar *g, int start, perrdetail *err_ret, - int *flags) -{ - parser_state *ps; - node *n; - int started = 0; - int col_offset, end_col_offset; - growable_comment_array type_ignores; - - if (!growable_comment_array_init(&type_ignores, 10)) { - err_ret->error = E_NOMEM; - PyTokenizer_Free(tok); - return NULL; - } - - if ((ps = PyParser_New(g, start)) == NULL) { - err_ret->error = E_NOMEM; - growable_comment_array_deallocate(&type_ignores); - PyTokenizer_Free(tok); - return NULL; - } -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD - if (*flags & PyPARSE_BARRY_AS_BDFL) - ps->p_flags |= CO_FUTURE_BARRY_AS_BDFL; - if (*flags & PyPARSE_TYPE_COMMENTS) - ps->p_flags |= PyCF_TYPE_COMMENTS; -#endif - - for (;;) { - const char *a, *b; - int type; - size_t len; - char *str; - col_offset = -1; - int lineno; - const char *line_start; - - type = PyTokenizer_Get(tok, &a, &b); - - len = (a != NULL && b != NULL) ? b - a : 0; - str = (char *) PyObject_MALLOC(len + 1); - if (str == NULL) { - err_ret->error = E_NOMEM; - break; - } - if (len > 0) - strncpy(str, a, len); - str[len] = '\0'; - -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD - if (type == NOTEQUAL) { - if (!(ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) && - strcmp(str, "!=")) { - PyObject_FREE(str); - err_ret->error = E_SYNTAX; - break; - } - else if ((ps->p_flags & CO_FUTURE_BARRY_AS_BDFL) && - strcmp(str, "<>")) { - PyObject_FREE(str); - err_ret->expected = NOTEQUAL; - err_ret->error = E_SYNTAX; - break; - } - } -#endif - - /* Nodes of type STRING, especially multi line strings - must be handled differently in order to get both - the starting line number and the column offset right. - (cf. issue 16806) */ - lineno = type == STRING ? 
tok->first_lineno : tok->lineno; - line_start = type == STRING ? tok->multi_line_start : tok->line_start; - if (a != NULL && a >= line_start) { - col_offset = Py_SAFE_DOWNCAST(a - line_start, - intptr_t, int); - } - else { - col_offset = -1; - } - - if (b != NULL && b >= tok->line_start) { - end_col_offset = Py_SAFE_DOWNCAST(b - tok->line_start, - intptr_t, int); - } - else { - end_col_offset = -1; - } - - if (type == TYPE_IGNORE) { - if (!growable_comment_array_add(&type_ignores, tok->lineno, str)) { - err_ret->error = E_NOMEM; - break; - } - continue; - } - - if (type == ERRORTOKEN) { - err_ret->error = tok->done; - break; - } - if (type == ENDMARKER && started) { - type = NEWLINE; /* Add an extra newline */ - started = 0; - /* Add the right number of dedent tokens, - except if a certain flag is given -- - codeop.py uses this. */ - if (tok->indent && - !(*flags & PyPARSE_DONT_IMPLY_DEDENT)) - { - tok->pendin = -tok->indent; - tok->indent = 0; - } - } - else { - started = 1; - } - - if ((err_ret->error = - PyParser_AddToken(ps, (int)type, str, - lineno, col_offset, tok->lineno, end_col_offset, - &(err_ret->expected))) != E_OK) { - if (tok->done == E_EOF && !ISWHITESPACE(type)) { - tok->done = E_SYNTAX; - } - if (err_ret->error != E_DONE) { - PyObject_FREE(str); - err_ret->token = type; - } - break; - } - } - - if (err_ret->error == E_DONE) { - n = ps->p_tree; - ps->p_tree = NULL; - - if (n->n_type == file_input) { - /* Put type_ignore nodes in the ENDMARKER of file_input. */ - int num; - node *ch; - size_t i; - - num = NCH(n); - ch = CHILD(n, num - 1); - REQ(ch, ENDMARKER); - - for (i = 0; i < type_ignores.num_items; i++) { - int res = PyNode_AddChild(ch, TYPE_IGNORE, type_ignores.items[i].comment, - type_ignores.items[i].lineno, 0, - type_ignores.items[i].lineno, 0); - if (res != 0) { - err_ret->error = res; - PyNode_Free(n); - n = NULL; - break; - } - type_ignores.items[i].comment = NULL; - } - } - - /* Check that the source for a single input statement really - is a single statement by looking at what is left in the - buffer after parsing. Trailing whitespace and comments - are OK. */ - if (err_ret->error == E_DONE && start == single_input) { - const char *cur = tok->cur; - char c = *tok->cur; - - for (;;) { - while (c == ' ' || c == '\t' || c == '\n' || c == '\014') - c = *++cur; - - if (!c) - break; - - if (c != '#') { - err_ret->error = E_BADSINGLE; - PyNode_Free(n); - n = NULL; - break; - } - - /* Suck up comment. */ - while (c && c != '\n') - c = *++cur; - } - } - } - else - n = NULL; - - growable_comment_array_deallocate(&type_ignores); - -#ifdef PY_PARSER_REQUIRES_FUTURE_KEYWORD - *flags = ps->p_flags; -#endif - PyParser_Delete(ps); - - if (n == NULL) { - if (tok->done == E_EOF) - err_ret->error = E_EOF; - err_ret->lineno = tok->lineno; - if (tok->buf != NULL) { - size_t len; - assert(tok->cur - tok->buf < INT_MAX); - /* if we've managed to parse a token, point the offset to its start, - * else use the current reading position of the tokenizer - */ - err_ret->offset = col_offset != -1 ? 
col_offset + 1 : ((int)(tok->cur - tok->buf)); - len = tok->inp - tok->buf; - err_ret->text = (char *) PyObject_MALLOC(len + 1); - if (err_ret->text != NULL) { - if (len > 0) - strncpy(err_ret->text, tok->buf, len); - err_ret->text[len] = '\0'; - } - } - } else if (tok->encoding != NULL) { - /* 'nodes->n_str' uses PyObject_*, while 'tok->encoding' was - * allocated using PyMem_ - */ - node* r = PyNode_New(encoding_decl); - if (r) - r->n_str = PyObject_MALLOC(strlen(tok->encoding)+1); - if (!r || !r->n_str) { - err_ret->error = E_NOMEM; - if (r) - PyObject_FREE(r); - n = NULL; - goto done; - } - strcpy(r->n_str, tok->encoding); - PyMem_FREE(tok->encoding); - tok->encoding = NULL; - r->n_nchildren = 1; - r->n_child = n; - n = r; - } - -done: - PyTokenizer_Free(tok); - - if (n != NULL) { - _PyNode_FinalizeEndPos(n); - } - return n; -} - -static int -initerr(perrdetail *err_ret, PyObject *filename) -{ - err_ret->error = E_OK; - err_ret->lineno = 0; - err_ret->offset = 0; - err_ret->text = NULL; - err_ret->token = -1; - err_ret->expected = -1; - if (filename) { - Py_INCREF(filename); - err_ret->filename = filename; - } - else { - err_ret->filename = PyUnicode_FromString(""); - if (err_ret->filename == NULL) { - err_ret->error = E_ERROR; - return -1; - } - } - return 0; -} diff --git a/Parser/pegen/peg_api.c b/Parser/peg_api.c similarity index 98% rename from Parser/pegen/peg_api.c rename to Parser/peg_api.c index 5e71ecdb13cf0..b947c78076545 100644 --- a/Parser/pegen/peg_api.c +++ b/Parser/peg_api.c @@ -1,6 +1,6 @@ #include "pegen_interface.h" -#include "../tokenizer.h" +#include "tokenizer.h" #include "pegen.h" mod_ty diff --git a/Parser/pegen/pegen.c b/Parser/pegen.c similarity index 99% rename from Parser/pegen/pegen.c rename to Parser/pegen.c index 7b581cadfb64a..e29910bf86ed5 100644 --- a/Parser/pegen/pegen.c +++ b/Parser/pegen.c @@ -1,9 +1,9 @@ #include #include -#include "../tokenizer.h" +#include "tokenizer.h" #include "pegen.h" -#include "parse_string.h" +#include "string_parser.h" PyObject * _PyPegen_new_type_comment(Parser *p, char *s) diff --git a/Parser/pegen/pegen.h b/Parser/pegen.h similarity index 100% rename from Parser/pegen/pegen.h rename to Parser/pegen.h diff --git a/Parser/pegen/parse.c b/Parser/pegen/parse.c deleted file mode 100644 index d28e6c83aadb0..0000000000000 --- a/Parser/pegen/parse.c +++ /dev/null @@ -1,24415 +0,0 @@ -// @generated by pegen.py from ./Grammar/python.gram -#include "pegen.h" - -#if defined(Py_DEBUG) && defined(Py_BUILD_CORE) -extern int Py_DebugFlag; -#define D(x) if (Py_DebugFlag) x; -#else -#define D(x) -#endif -static const int n_keyword_lists = 9; -static KeywordToken *reserved_keywords[] = { - NULL, - NULL, - (KeywordToken[]) { - {"if", 510}, - {"in", 518}, - {"is", 526}, - {"as", 530}, - {"or", 531}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"del", 503}, - {"try", 511}, - {"for", 517}, - {"def", 522}, - {"not", 525}, - {"and", 532}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"pass", 502}, - {"from", 514}, - {"elif", 515}, - {"else", 516}, - {"with", 519}, - {"True", 527}, - {"None", 529}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"raise", 501}, - {"yield", 504}, - {"break", 506}, - {"while", 512}, - {"class", 523}, - {"False", 528}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"return", 500}, - {"assert", 505}, - {"global", 508}, - {"import", 513}, - {"except", 520}, - {"lambda", 524}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"finally", 521}, - {NULL, -1}, - }, - (KeywordToken[]) { - {"continue", 507}, - {"nonlocal", 509}, - {NULL, -1}, - 
}, -}; -#define file_type 1000 -#define interactive_type 1001 -#define eval_type 1002 -#define func_type_type 1003 -#define fstring_type 1004 -#define type_expressions_type 1005 -#define statements_type 1006 -#define statement_type 1007 -#define statement_newline_type 1008 -#define simple_stmt_type 1009 -#define small_stmt_type 1010 -#define compound_stmt_type 1011 -#define assignment_type 1012 -#define augassign_type 1013 -#define global_stmt_type 1014 -#define nonlocal_stmt_type 1015 -#define yield_stmt_type 1016 -#define assert_stmt_type 1017 -#define del_stmt_type 1018 -#define import_stmt_type 1019 -#define import_name_type 1020 -#define import_from_type 1021 -#define import_from_targets_type 1022 -#define import_from_as_names_type 1023 -#define import_from_as_name_type 1024 -#define dotted_as_names_type 1025 -#define dotted_as_name_type 1026 -#define dotted_name_type 1027 // Left-recursive -#define if_stmt_type 1028 -#define elif_stmt_type 1029 -#define else_block_type 1030 -#define while_stmt_type 1031 -#define for_stmt_type 1032 -#define with_stmt_type 1033 -#define with_item_type 1034 -#define try_stmt_type 1035 -#define except_block_type 1036 -#define finally_block_type 1037 -#define return_stmt_type 1038 -#define raise_stmt_type 1039 -#define function_def_type 1040 -#define function_def_raw_type 1041 -#define func_type_comment_type 1042 -#define params_type 1043 -#define parameters_type 1044 -#define slash_no_default_type 1045 -#define slash_with_default_type 1046 -#define star_etc_type 1047 -#define kwds_type 1048 -#define param_no_default_type 1049 -#define param_with_default_type 1050 -#define param_maybe_default_type 1051 -#define param_type 1052 -#define annotation_type 1053 -#define default_type 1054 -#define decorators_type 1055 -#define class_def_type 1056 -#define class_def_raw_type 1057 -#define block_type 1058 -#define expressions_list_type 1059 -#define star_expressions_type 1060 -#define star_expression_type 1061 -#define star_named_expressions_type 1062 -#define star_named_expression_type 1063 -#define named_expression_type 1064 -#define annotated_rhs_type 1065 -#define expressions_type 1066 -#define expression_type 1067 -#define lambdef_type 1068 -#define lambda_params_type 1069 -#define lambda_parameters_type 1070 -#define lambda_slash_no_default_type 1071 -#define lambda_slash_with_default_type 1072 -#define lambda_star_etc_type 1073 -#define lambda_kwds_type 1074 -#define lambda_param_no_default_type 1075 -#define lambda_param_with_default_type 1076 -#define lambda_param_maybe_default_type 1077 -#define lambda_param_type 1078 -#define disjunction_type 1079 -#define conjunction_type 1080 -#define inversion_type 1081 -#define comparison_type 1082 -#define compare_op_bitwise_or_pair_type 1083 -#define eq_bitwise_or_type 1084 -#define noteq_bitwise_or_type 1085 -#define lte_bitwise_or_type 1086 -#define lt_bitwise_or_type 1087 -#define gte_bitwise_or_type 1088 -#define gt_bitwise_or_type 1089 -#define notin_bitwise_or_type 1090 -#define in_bitwise_or_type 1091 -#define isnot_bitwise_or_type 1092 -#define is_bitwise_or_type 1093 -#define bitwise_or_type 1094 // Left-recursive -#define bitwise_xor_type 1095 // Left-recursive -#define bitwise_and_type 1096 // Left-recursive -#define shift_expr_type 1097 // Left-recursive -#define sum_type 1098 // Left-recursive -#define term_type 1099 // Left-recursive -#define factor_type 1100 -#define power_type 1101 -#define await_primary_type 1102 -#define primary_type 1103 // Left-recursive -#define slices_type 1104 -#define 
slice_type 1105 -#define atom_type 1106 -#define strings_type 1107 -#define list_type 1108 -#define listcomp_type 1109 -#define tuple_type 1110 -#define group_type 1111 -#define genexp_type 1112 -#define set_type 1113 -#define setcomp_type 1114 -#define dict_type 1115 -#define dictcomp_type 1116 -#define double_starred_kvpairs_type 1117 -#define double_starred_kvpair_type 1118 -#define kvpair_type 1119 -#define for_if_clauses_type 1120 -#define for_if_clause_type 1121 -#define yield_expr_type 1122 -#define arguments_type 1123 -#define args_type 1124 -#define kwargs_type 1125 -#define starred_expression_type 1126 -#define kwarg_or_starred_type 1127 -#define kwarg_or_double_starred_type 1128 -#define star_targets_type 1129 -#define star_targets_seq_type 1130 -#define star_target_type 1131 -#define star_atom_type 1132 -#define single_target_type 1133 -#define single_subscript_attribute_target_type 1134 -#define del_targets_type 1135 -#define del_target_type 1136 -#define del_t_atom_type 1137 -#define del_target_end_type 1138 -#define targets_type 1139 -#define target_type 1140 -#define t_primary_type 1141 // Left-recursive -#define t_lookahead_type 1142 -#define t_atom_type 1143 -#define incorrect_arguments_type 1144 -#define invalid_kwarg_type 1145 -#define invalid_named_expression_type 1146 -#define invalid_assignment_type 1147 -#define invalid_block_type 1148 -#define invalid_comprehension_type 1149 -#define invalid_dict_comprehension_type 1150 -#define invalid_parameters_type 1151 -#define invalid_lambda_parameters_type 1152 -#define invalid_star_etc_type 1153 -#define invalid_lambda_star_etc_type 1154 -#define invalid_double_type_comments_type 1155 -#define invalid_del_target_type 1156 -#define invalid_import_from_targets_type 1157 -#define _loop0_1_type 1158 -#define _loop0_2_type 1159 -#define _loop0_4_type 1160 -#define _gather_3_type 1161 -#define _loop0_6_type 1162 -#define _gather_5_type 1163 -#define _loop0_8_type 1164 -#define _gather_7_type 1165 -#define _loop0_10_type 1166 -#define _gather_9_type 1167 -#define _loop1_11_type 1168 -#define _loop0_13_type 1169 -#define _gather_12_type 1170 -#define _tmp_14_type 1171 -#define _tmp_15_type 1172 -#define _tmp_16_type 1173 -#define _tmp_17_type 1174 -#define _tmp_18_type 1175 -#define _tmp_19_type 1176 -#define _tmp_20_type 1177 -#define _tmp_21_type 1178 -#define _loop1_22_type 1179 -#define _tmp_23_type 1180 -#define _tmp_24_type 1181 -#define _loop0_26_type 1182 -#define _gather_25_type 1183 -#define _loop0_28_type 1184 -#define _gather_27_type 1185 -#define _tmp_29_type 1186 -#define _loop0_30_type 1187 -#define _loop1_31_type 1188 -#define _loop0_33_type 1189 -#define _gather_32_type 1190 -#define _tmp_34_type 1191 -#define _loop0_36_type 1192 -#define _gather_35_type 1193 -#define _tmp_37_type 1194 -#define _loop0_39_type 1195 -#define _gather_38_type 1196 -#define _loop0_41_type 1197 -#define _gather_40_type 1198 -#define _loop0_43_type 1199 -#define _gather_42_type 1200 -#define _loop0_45_type 1201 -#define _gather_44_type 1202 -#define _tmp_46_type 1203 -#define _loop1_47_type 1204 -#define _tmp_48_type 1205 -#define _tmp_49_type 1206 -#define _tmp_50_type 1207 -#define _tmp_51_type 1208 -#define _tmp_52_type 1209 -#define _loop0_53_type 1210 -#define _loop0_54_type 1211 -#define _loop0_55_type 1212 -#define _loop1_56_type 1213 -#define _loop0_57_type 1214 -#define _loop1_58_type 1215 -#define _loop1_59_type 1216 -#define _loop1_60_type 1217 -#define _loop0_61_type 1218 -#define _loop1_62_type 1219 -#define _loop0_63_type 
1220 -#define _loop1_64_type 1221 -#define _loop0_65_type 1222 -#define _loop1_66_type 1223 -#define _loop1_67_type 1224 -#define _tmp_68_type 1225 -#define _loop0_70_type 1226 -#define _gather_69_type 1227 -#define _loop1_71_type 1228 -#define _loop0_73_type 1229 -#define _gather_72_type 1230 -#define _loop1_74_type 1231 -#define _loop0_75_type 1232 -#define _loop0_76_type 1233 -#define _loop0_77_type 1234 -#define _loop1_78_type 1235 -#define _loop0_79_type 1236 -#define _loop1_80_type 1237 -#define _loop1_81_type 1238 -#define _loop1_82_type 1239 -#define _loop0_83_type 1240 -#define _loop1_84_type 1241 -#define _loop0_85_type 1242 -#define _loop1_86_type 1243 -#define _loop0_87_type 1244 -#define _loop1_88_type 1245 -#define _loop1_89_type 1246 -#define _loop1_90_type 1247 -#define _loop1_91_type 1248 -#define _tmp_92_type 1249 -#define _loop0_94_type 1250 -#define _gather_93_type 1251 -#define _tmp_95_type 1252 -#define _tmp_96_type 1253 -#define _tmp_97_type 1254 -#define _tmp_98_type 1255 -#define _loop1_99_type 1256 -#define _tmp_100_type 1257 -#define _tmp_101_type 1258 -#define _loop0_103_type 1259 -#define _gather_102_type 1260 -#define _loop1_104_type 1261 -#define _loop0_105_type 1262 -#define _loop0_106_type 1263 -#define _tmp_107_type 1264 -#define _tmp_108_type 1265 -#define _loop0_110_type 1266 -#define _gather_109_type 1267 -#define _loop0_112_type 1268 -#define _gather_111_type 1269 -#define _loop0_114_type 1270 -#define _gather_113_type 1271 -#define _loop0_116_type 1272 -#define _gather_115_type 1273 -#define _loop0_117_type 1274 -#define _loop0_119_type 1275 -#define _gather_118_type 1276 -#define _tmp_120_type 1277 -#define _loop0_122_type 1278 -#define _gather_121_type 1279 -#define _loop0_124_type 1280 -#define _gather_123_type 1281 -#define _tmp_125_type 1282 -#define _loop0_126_type 1283 -#define _tmp_127_type 1284 -#define _loop0_128_type 1285 -#define _loop0_129_type 1286 -#define _tmp_130_type 1287 -#define _tmp_131_type 1288 -#define _loop0_132_type 1289 -#define _tmp_133_type 1290 -#define _loop0_134_type 1291 -#define _tmp_135_type 1292 -#define _tmp_136_type 1293 -#define _tmp_137_type 1294 -#define _tmp_138_type 1295 -#define _tmp_139_type 1296 -#define _tmp_140_type 1297 -#define _tmp_141_type 1298 -#define _tmp_142_type 1299 -#define _tmp_143_type 1300 -#define _tmp_144_type 1301 -#define _tmp_145_type 1302 -#define _tmp_146_type 1303 -#define _tmp_147_type 1304 -#define _tmp_148_type 1305 -#define _tmp_149_type 1306 -#define _tmp_150_type 1307 -#define _loop1_151_type 1308 -#define _loop1_152_type 1309 -#define _tmp_153_type 1310 -#define _tmp_154_type 1311 - -static mod_ty file_rule(Parser *p); -static mod_ty interactive_rule(Parser *p); -static mod_ty eval_rule(Parser *p); -static mod_ty func_type_rule(Parser *p); -static expr_ty fstring_rule(Parser *p); -static asdl_seq* type_expressions_rule(Parser *p); -static asdl_seq* statements_rule(Parser *p); -static asdl_seq* statement_rule(Parser *p); -static asdl_seq* statement_newline_rule(Parser *p); -static asdl_seq* simple_stmt_rule(Parser *p); -static stmt_ty small_stmt_rule(Parser *p); -static stmt_ty compound_stmt_rule(Parser *p); -static stmt_ty assignment_rule(Parser *p); -static AugOperator* augassign_rule(Parser *p); -static stmt_ty global_stmt_rule(Parser *p); -static stmt_ty nonlocal_stmt_rule(Parser *p); -static stmt_ty yield_stmt_rule(Parser *p); -static stmt_ty assert_stmt_rule(Parser *p); -static stmt_ty del_stmt_rule(Parser *p); -static stmt_ty import_stmt_rule(Parser *p); -static stmt_ty 
import_name_rule(Parser *p); -static stmt_ty import_from_rule(Parser *p); -static asdl_seq* import_from_targets_rule(Parser *p); -static asdl_seq* import_from_as_names_rule(Parser *p); -static alias_ty import_from_as_name_rule(Parser *p); -static asdl_seq* dotted_as_names_rule(Parser *p); -static alias_ty dotted_as_name_rule(Parser *p); -static expr_ty dotted_name_rule(Parser *p); -static stmt_ty if_stmt_rule(Parser *p); -static stmt_ty elif_stmt_rule(Parser *p); -static asdl_seq* else_block_rule(Parser *p); -static stmt_ty while_stmt_rule(Parser *p); -static stmt_ty for_stmt_rule(Parser *p); -static stmt_ty with_stmt_rule(Parser *p); -static withitem_ty with_item_rule(Parser *p); -static stmt_ty try_stmt_rule(Parser *p); -static excepthandler_ty except_block_rule(Parser *p); -static asdl_seq* finally_block_rule(Parser *p); -static stmt_ty return_stmt_rule(Parser *p); -static stmt_ty raise_stmt_rule(Parser *p); -static stmt_ty function_def_rule(Parser *p); -static stmt_ty function_def_raw_rule(Parser *p); -static Token* func_type_comment_rule(Parser *p); -static arguments_ty params_rule(Parser *p); -static arguments_ty parameters_rule(Parser *p); -static asdl_seq* slash_no_default_rule(Parser *p); -static SlashWithDefault* slash_with_default_rule(Parser *p); -static StarEtc* star_etc_rule(Parser *p); -static arg_ty kwds_rule(Parser *p); -static arg_ty param_no_default_rule(Parser *p); -static NameDefaultPair* param_with_default_rule(Parser *p); -static NameDefaultPair* param_maybe_default_rule(Parser *p); -static arg_ty param_rule(Parser *p); -static expr_ty annotation_rule(Parser *p); -static expr_ty default_rule(Parser *p); -static asdl_seq* decorators_rule(Parser *p); -static stmt_ty class_def_rule(Parser *p); -static stmt_ty class_def_raw_rule(Parser *p); -static asdl_seq* block_rule(Parser *p); -static asdl_seq* expressions_list_rule(Parser *p); -static expr_ty star_expressions_rule(Parser *p); -static expr_ty star_expression_rule(Parser *p); -static asdl_seq* star_named_expressions_rule(Parser *p); -static expr_ty star_named_expression_rule(Parser *p); -static expr_ty named_expression_rule(Parser *p); -static expr_ty annotated_rhs_rule(Parser *p); -static expr_ty expressions_rule(Parser *p); -static expr_ty expression_rule(Parser *p); -static expr_ty lambdef_rule(Parser *p); -static arguments_ty lambda_params_rule(Parser *p); -static arguments_ty lambda_parameters_rule(Parser *p); -static asdl_seq* lambda_slash_no_default_rule(Parser *p); -static SlashWithDefault* lambda_slash_with_default_rule(Parser *p); -static StarEtc* lambda_star_etc_rule(Parser *p); -static arg_ty lambda_kwds_rule(Parser *p); -static arg_ty lambda_param_no_default_rule(Parser *p); -static NameDefaultPair* lambda_param_with_default_rule(Parser *p); -static NameDefaultPair* lambda_param_maybe_default_rule(Parser *p); -static arg_ty lambda_param_rule(Parser *p); -static expr_ty disjunction_rule(Parser *p); -static expr_ty conjunction_rule(Parser *p); -static expr_ty inversion_rule(Parser *p); -static expr_ty comparison_rule(Parser *p); -static CmpopExprPair* compare_op_bitwise_or_pair_rule(Parser *p); -static CmpopExprPair* eq_bitwise_or_rule(Parser *p); -static CmpopExprPair* noteq_bitwise_or_rule(Parser *p); -static CmpopExprPair* lte_bitwise_or_rule(Parser *p); -static CmpopExprPair* lt_bitwise_or_rule(Parser *p); -static CmpopExprPair* gte_bitwise_or_rule(Parser *p); -static CmpopExprPair* gt_bitwise_or_rule(Parser *p); -static CmpopExprPair* notin_bitwise_or_rule(Parser *p); -static CmpopExprPair* 
in_bitwise_or_rule(Parser *p); -static CmpopExprPair* isnot_bitwise_or_rule(Parser *p); -static CmpopExprPair* is_bitwise_or_rule(Parser *p); -static expr_ty bitwise_or_rule(Parser *p); -static expr_ty bitwise_xor_rule(Parser *p); -static expr_ty bitwise_and_rule(Parser *p); -static expr_ty shift_expr_rule(Parser *p); -static expr_ty sum_rule(Parser *p); -static expr_ty term_rule(Parser *p); -static expr_ty factor_rule(Parser *p); -static expr_ty power_rule(Parser *p); -static expr_ty await_primary_rule(Parser *p); -static expr_ty primary_rule(Parser *p); -static expr_ty slices_rule(Parser *p); -static expr_ty slice_rule(Parser *p); -static expr_ty atom_rule(Parser *p); -static expr_ty strings_rule(Parser *p); -static expr_ty list_rule(Parser *p); -static expr_ty listcomp_rule(Parser *p); -static expr_ty tuple_rule(Parser *p); -static expr_ty group_rule(Parser *p); -static expr_ty genexp_rule(Parser *p); -static expr_ty set_rule(Parser *p); -static expr_ty setcomp_rule(Parser *p); -static expr_ty dict_rule(Parser *p); -static expr_ty dictcomp_rule(Parser *p); -static asdl_seq* double_starred_kvpairs_rule(Parser *p); -static KeyValuePair* double_starred_kvpair_rule(Parser *p); -static KeyValuePair* kvpair_rule(Parser *p); -static asdl_seq* for_if_clauses_rule(Parser *p); -static comprehension_ty for_if_clause_rule(Parser *p); -static expr_ty yield_expr_rule(Parser *p); -static expr_ty arguments_rule(Parser *p); -static expr_ty args_rule(Parser *p); -static asdl_seq* kwargs_rule(Parser *p); -static expr_ty starred_expression_rule(Parser *p); -static KeywordOrStarred* kwarg_or_starred_rule(Parser *p); -static KeywordOrStarred* kwarg_or_double_starred_rule(Parser *p); -static expr_ty star_targets_rule(Parser *p); -static asdl_seq* star_targets_seq_rule(Parser *p); -static expr_ty star_target_rule(Parser *p); -static expr_ty star_atom_rule(Parser *p); -static expr_ty single_target_rule(Parser *p); -static expr_ty single_subscript_attribute_target_rule(Parser *p); -static asdl_seq* del_targets_rule(Parser *p); -static expr_ty del_target_rule(Parser *p); -static expr_ty del_t_atom_rule(Parser *p); -static void *del_target_end_rule(Parser *p); -static asdl_seq* targets_rule(Parser *p); -static expr_ty target_rule(Parser *p); -static expr_ty t_primary_rule(Parser *p); -static void *t_lookahead_rule(Parser *p); -static expr_ty t_atom_rule(Parser *p); -static void *incorrect_arguments_rule(Parser *p); -static void *invalid_kwarg_rule(Parser *p); -static void *invalid_named_expression_rule(Parser *p); -static void *invalid_assignment_rule(Parser *p); -static void *invalid_block_rule(Parser *p); -static void *invalid_comprehension_rule(Parser *p); -static void *invalid_dict_comprehension_rule(Parser *p); -static void *invalid_parameters_rule(Parser *p); -static void *invalid_lambda_parameters_rule(Parser *p); -static void *invalid_star_etc_rule(Parser *p); -static void *invalid_lambda_star_etc_rule(Parser *p); -static void *invalid_double_type_comments_rule(Parser *p); -static void *invalid_del_target_rule(Parser *p); -static void *invalid_import_from_targets_rule(Parser *p); -static asdl_seq *_loop0_1_rule(Parser *p); -static asdl_seq *_loop0_2_rule(Parser *p); -static asdl_seq *_loop0_4_rule(Parser *p); -static asdl_seq *_gather_3_rule(Parser *p); -static asdl_seq *_loop0_6_rule(Parser *p); -static asdl_seq *_gather_5_rule(Parser *p); -static asdl_seq *_loop0_8_rule(Parser *p); -static asdl_seq *_gather_7_rule(Parser *p); -static asdl_seq *_loop0_10_rule(Parser *p); -static asdl_seq 
*_gather_9_rule(Parser *p); -static asdl_seq *_loop1_11_rule(Parser *p); -static asdl_seq *_loop0_13_rule(Parser *p); -static asdl_seq *_gather_12_rule(Parser *p); -static void *_tmp_14_rule(Parser *p); -static void *_tmp_15_rule(Parser *p); -static void *_tmp_16_rule(Parser *p); -static void *_tmp_17_rule(Parser *p); -static void *_tmp_18_rule(Parser *p); -static void *_tmp_19_rule(Parser *p); -static void *_tmp_20_rule(Parser *p); -static void *_tmp_21_rule(Parser *p); -static asdl_seq *_loop1_22_rule(Parser *p); -static void *_tmp_23_rule(Parser *p); -static void *_tmp_24_rule(Parser *p); -static asdl_seq *_loop0_26_rule(Parser *p); -static asdl_seq *_gather_25_rule(Parser *p); -static asdl_seq *_loop0_28_rule(Parser *p); -static asdl_seq *_gather_27_rule(Parser *p); -static void *_tmp_29_rule(Parser *p); -static asdl_seq *_loop0_30_rule(Parser *p); -static asdl_seq *_loop1_31_rule(Parser *p); -static asdl_seq *_loop0_33_rule(Parser *p); -static asdl_seq *_gather_32_rule(Parser *p); -static void *_tmp_34_rule(Parser *p); -static asdl_seq *_loop0_36_rule(Parser *p); -static asdl_seq *_gather_35_rule(Parser *p); -static void *_tmp_37_rule(Parser *p); -static asdl_seq *_loop0_39_rule(Parser *p); -static asdl_seq *_gather_38_rule(Parser *p); -static asdl_seq *_loop0_41_rule(Parser *p); -static asdl_seq *_gather_40_rule(Parser *p); -static asdl_seq *_loop0_43_rule(Parser *p); -static asdl_seq *_gather_42_rule(Parser *p); -static asdl_seq *_loop0_45_rule(Parser *p); -static asdl_seq *_gather_44_rule(Parser *p); -static void *_tmp_46_rule(Parser *p); -static asdl_seq *_loop1_47_rule(Parser *p); -static void *_tmp_48_rule(Parser *p); -static void *_tmp_49_rule(Parser *p); -static void *_tmp_50_rule(Parser *p); -static void *_tmp_51_rule(Parser *p); -static void *_tmp_52_rule(Parser *p); -static asdl_seq *_loop0_53_rule(Parser *p); -static asdl_seq *_loop0_54_rule(Parser *p); -static asdl_seq *_loop0_55_rule(Parser *p); -static asdl_seq *_loop1_56_rule(Parser *p); -static asdl_seq *_loop0_57_rule(Parser *p); -static asdl_seq *_loop1_58_rule(Parser *p); -static asdl_seq *_loop1_59_rule(Parser *p); -static asdl_seq *_loop1_60_rule(Parser *p); -static asdl_seq *_loop0_61_rule(Parser *p); -static asdl_seq *_loop1_62_rule(Parser *p); -static asdl_seq *_loop0_63_rule(Parser *p); -static asdl_seq *_loop1_64_rule(Parser *p); -static asdl_seq *_loop0_65_rule(Parser *p); -static asdl_seq *_loop1_66_rule(Parser *p); -static asdl_seq *_loop1_67_rule(Parser *p); -static void *_tmp_68_rule(Parser *p); -static asdl_seq *_loop0_70_rule(Parser *p); -static asdl_seq *_gather_69_rule(Parser *p); -static asdl_seq *_loop1_71_rule(Parser *p); -static asdl_seq *_loop0_73_rule(Parser *p); -static asdl_seq *_gather_72_rule(Parser *p); -static asdl_seq *_loop1_74_rule(Parser *p); -static asdl_seq *_loop0_75_rule(Parser *p); -static asdl_seq *_loop0_76_rule(Parser *p); -static asdl_seq *_loop0_77_rule(Parser *p); -static asdl_seq *_loop1_78_rule(Parser *p); -static asdl_seq *_loop0_79_rule(Parser *p); -static asdl_seq *_loop1_80_rule(Parser *p); -static asdl_seq *_loop1_81_rule(Parser *p); -static asdl_seq *_loop1_82_rule(Parser *p); -static asdl_seq *_loop0_83_rule(Parser *p); -static asdl_seq *_loop1_84_rule(Parser *p); -static asdl_seq *_loop0_85_rule(Parser *p); -static asdl_seq *_loop1_86_rule(Parser *p); -static asdl_seq *_loop0_87_rule(Parser *p); -static asdl_seq *_loop1_88_rule(Parser *p); -static asdl_seq *_loop1_89_rule(Parser *p); -static asdl_seq *_loop1_90_rule(Parser *p); -static asdl_seq 
*_loop1_91_rule(Parser *p); -static void *_tmp_92_rule(Parser *p); -static asdl_seq *_loop0_94_rule(Parser *p); -static asdl_seq *_gather_93_rule(Parser *p); -static void *_tmp_95_rule(Parser *p); -static void *_tmp_96_rule(Parser *p); -static void *_tmp_97_rule(Parser *p); -static void *_tmp_98_rule(Parser *p); -static asdl_seq *_loop1_99_rule(Parser *p); -static void *_tmp_100_rule(Parser *p); -static void *_tmp_101_rule(Parser *p); -static asdl_seq *_loop0_103_rule(Parser *p); -static asdl_seq *_gather_102_rule(Parser *p); -static asdl_seq *_loop1_104_rule(Parser *p); -static asdl_seq *_loop0_105_rule(Parser *p); -static asdl_seq *_loop0_106_rule(Parser *p); -static void *_tmp_107_rule(Parser *p); -static void *_tmp_108_rule(Parser *p); -static asdl_seq *_loop0_110_rule(Parser *p); -static asdl_seq *_gather_109_rule(Parser *p); -static asdl_seq *_loop0_112_rule(Parser *p); -static asdl_seq *_gather_111_rule(Parser *p); -static asdl_seq *_loop0_114_rule(Parser *p); -static asdl_seq *_gather_113_rule(Parser *p); -static asdl_seq *_loop0_116_rule(Parser *p); -static asdl_seq *_gather_115_rule(Parser *p); -static asdl_seq *_loop0_117_rule(Parser *p); -static asdl_seq *_loop0_119_rule(Parser *p); -static asdl_seq *_gather_118_rule(Parser *p); -static void *_tmp_120_rule(Parser *p); -static asdl_seq *_loop0_122_rule(Parser *p); -static asdl_seq *_gather_121_rule(Parser *p); -static asdl_seq *_loop0_124_rule(Parser *p); -static asdl_seq *_gather_123_rule(Parser *p); -static void *_tmp_125_rule(Parser *p); -static asdl_seq *_loop0_126_rule(Parser *p); -static void *_tmp_127_rule(Parser *p); -static asdl_seq *_loop0_128_rule(Parser *p); -static asdl_seq *_loop0_129_rule(Parser *p); -static void *_tmp_130_rule(Parser *p); -static void *_tmp_131_rule(Parser *p); -static asdl_seq *_loop0_132_rule(Parser *p); -static void *_tmp_133_rule(Parser *p); -static asdl_seq *_loop0_134_rule(Parser *p); -static void *_tmp_135_rule(Parser *p); -static void *_tmp_136_rule(Parser *p); -static void *_tmp_137_rule(Parser *p); -static void *_tmp_138_rule(Parser *p); -static void *_tmp_139_rule(Parser *p); -static void *_tmp_140_rule(Parser *p); -static void *_tmp_141_rule(Parser *p); -static void *_tmp_142_rule(Parser *p); -static void *_tmp_143_rule(Parser *p); -static void *_tmp_144_rule(Parser *p); -static void *_tmp_145_rule(Parser *p); -static void *_tmp_146_rule(Parser *p); -static void *_tmp_147_rule(Parser *p); -static void *_tmp_148_rule(Parser *p); -static void *_tmp_149_rule(Parser *p); -static void *_tmp_150_rule(Parser *p); -static asdl_seq *_loop1_151_rule(Parser *p); -static asdl_seq *_loop1_152_rule(Parser *p); -static void *_tmp_153_rule(Parser *p); -static void *_tmp_154_rule(Parser *p); - - -// file: statements? $ -static mod_ty -file_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - mod_ty _res = NULL; - int _mark = p->mark; - { // statements? $ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> file[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statements? $")); - void *a; - Token * endmarker_var; - if ( - (a = statements_rule(p), 1) // statements? - && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - D(fprintf(stderr, "%*c+ file[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statements? 
$")); - _res = _PyPegen_make_module ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s file[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statements? $")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// interactive: statement_newline -static mod_ty -interactive_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - mod_ty _res = NULL; - int _mark = p->mark; - { // statement_newline - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> interactive[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement_newline")); - asdl_seq* a; - if ( - (a = statement_newline_rule(p)) // statement_newline - ) - { - D(fprintf(stderr, "%*c+ interactive[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement_newline")); - _res = Interactive ( a , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s interactive[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement_newline")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// eval: expressions NEWLINE* $ -static mod_ty -eval_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - mod_ty _res = NULL; - int _mark = p->mark; - { // expressions NEWLINE* $ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> eval[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); - asdl_seq * _loop0_1_var; - expr_ty a; - Token * endmarker_var; - if ( - (a = expressions_rule(p)) // expressions - && - (_loop0_1_var = _loop0_1_rule(p)) // NEWLINE* - && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - D(fprintf(stderr, "%*c+ eval[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expressions NEWLINE* $")); - _res = Expression ( a , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s eval[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expressions NEWLINE* $")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// func_type: '(' type_expressions? ')' '->' expression NEWLINE* $ -static mod_ty -func_type_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - mod_ty _res = NULL; - int _mark = p->mark; - { // '(' type_expressions? ')' '->' expression NEWLINE* $ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> func_type[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); - Token * _literal; - Token * _literal_1; - Token * _literal_2; - asdl_seq * _loop0_2_var; - void *a; - expr_ty b; - Token * endmarker_var; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = type_expressions_rule(p), 1) // type_expressions? 
- && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - (_literal_2 = _PyPegen_expect_token(p, 51)) // token='->' - && - (b = expression_rule(p)) // expression - && - (_loop0_2_var = _loop0_2_rule(p)) // NEWLINE* - && - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - D(fprintf(stderr, "%*c+ func_type[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); - _res = FunctionType ( a , b , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s func_type[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' type_expressions? ')' '->' expression NEWLINE* $")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// fstring: star_expressions -static expr_ty -fstring_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> fstring[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ fstring[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s fstring[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// type_expressions: -// | ','.expression+ ',' '*' expression ',' '**' expression -// | ','.expression+ ',' '*' expression -// | ','.expression+ ',' '**' expression -// | '*' expression ',' '**' expression -// | '*' expression -// | '**' expression -// | ','.expression+ -static asdl_seq* -type_expressions_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.expression+ ',' '*' expression ',' '**' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); - Token * _literal; - Token * _literal_1; - Token * _literal_2; - Token * _literal_3; - asdl_seq * a; - expr_ty b; - expr_ty c; - if ( - (a = _gather_3_rule(p)) // ','.expression+ - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' - && - (b = expression_rule(p)) // expression - && - (_literal_2 = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_3 = _PyPegen_expect_token(p, 35)) // token='**' - && - (c = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); - _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_seq_append_to_end ( p , a , b ) ) , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' 
', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '*' expression ',' '**' expression")); - } - { // ','.expression+ ',' '*' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - expr_ty b; - if ( - (a = _gather_5_rule(p)) // ','.expression+ - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '*' expression")); - _res = _PyPegen_seq_append_to_end ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '*' expression")); - } - { // ','.expression+ ',' '**' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - expr_ty b; - if ( - (a = _gather_7_rule(p)) // ','.expression+ - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_1 = _PyPegen_expect_token(p, 35)) // token='**' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+ ',' '**' expression")); - _res = _PyPegen_seq_append_to_end ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+ ',' '**' expression")); - } - { // '*' expression ',' '**' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); - Token * _literal; - Token * _literal_1; - Token * _literal_2; - expr_ty a; - expr_ty b; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_2 = _PyPegen_expect_token(p, 35)) // token='**' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression ',' '**' expression")); - _res = _PyPegen_seq_append_to_end ( p , CHECK ( _PyPegen_singleton_seq ( p , a ) ) , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' expression ',' '**' expression")); - } - { // '*' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); - _res = _PyPegen_singleton_seq ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression")); - } - { // '**' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); - _res = _PyPegen_singleton_seq ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); - } - { // ','.expression+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> type_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.expression+")); - asdl_seq * _gather_9_var; - if ( - (_gather_9_var = _gather_9_rule(p)) // ','.expression+ - ) - { - D(fprintf(stderr, "%*c+ type_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.expression+")); - _res = _gather_9_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s type_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.expression+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// statements: statement+ -static asdl_seq* -statements_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // statement+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statements[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement+")); - asdl_seq * a; - if ( - (a = _loop1_11_rule(p)) // statement+ - ) - { - D(fprintf(stderr, "%*c+ statements[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "statement+")); - _res = _PyPegen_seq_flatten ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statements[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "statement+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// statement: compound_stmt | simple_stmt -static asdl_seq* -statement_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // compound_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt")); - stmt_ty a; - if ( - (a = compound_stmt_rule(p)) // compound_stmt - ) - { - D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt")); - _res = _PyPegen_singleton_seq ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compound_stmt")); - } - { // simple_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - asdl_seq* simple_stmt_var; - if ( - (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt - ) - { - D(fprintf(stderr, "%*c+ statement[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - _res = simple_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// statement_newline: compound_stmt NEWLINE | simple_stmt | NEWLINE | $ -static asdl_seq* -statement_newline_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // compound_stmt NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); - stmt_ty a; - Token * newline_var; - if ( - (a = compound_stmt_rule(p)) // compound_stmt - && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "compound_stmt NEWLINE")); - _res = _PyPegen_singleton_seq ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "compound_stmt NEWLINE")); - } - { // simple_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - asdl_seq* simple_stmt_var; - if ( - (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt - ) - { - D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - _res = simple_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); - } - { // NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_singleton_seq ( p , CHECK ( _Py_Pass ( EXTRA ) ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); - } - { // $ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> statement_newline[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "$")); - Token * endmarker_var; - if ( - (endmarker_var = _PyPegen_expect_token(p, ENDMARKER)) // token='ENDMARKER' - ) - { - D(fprintf(stderr, "%*c+ statement_newline[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "$")); - _res = _PyPegen_interactive_exit ( p ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s statement_newline[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "$")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// simple_stmt: small_stmt !';' NEWLINE | ';'.small_stmt+ ';'? 
NEWLINE -static asdl_seq* -simple_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // small_stmt !';' NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); - stmt_ty a; - Token * newline_var; - if ( - (a = small_stmt_rule(p)) // small_stmt - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 13) // token=';' - && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt !';' NEWLINE")); - _res = _PyPegen_singleton_seq ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "small_stmt !';' NEWLINE")); - } - { // ';'.small_stmt+ ';'? NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> simple_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - Token * newline_var; - if ( - (a = _gather_12_rule(p)) // ';'.small_stmt+ - && - (_opt_var = _PyPegen_expect_token(p, 13), 1) // ';'? - && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ simple_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'.small_stmt+ ';'? NEWLINE")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s simple_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'.small_stmt+ ';'? 
NEWLINE")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// small_stmt: -// | assignment -// | star_expressions -// | &'return' return_stmt -// | &('import' | 'from') import_stmt -// | &'raise' raise_stmt -// | 'pass' -// | &'del' del_stmt -// | &'yield' yield_stmt -// | &'assert' assert_stmt -// | 'break' -// | 'continue' -// | &'global' global_stmt -// | &'nonlocal' nonlocal_stmt -static stmt_ty -small_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - if (_PyPegen_is_memoized(p, small_stmt_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // assignment - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "assignment")); - stmt_ty assignment_var; - if ( - (assignment_var = assignment_rule(p)) // assignment - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "assignment")); - _res = assignment_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "assignment")); - } - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty e; - if ( - (e = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Expr ( e , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); - } - { // &'return' return_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); - stmt_ty return_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 500) // token='return' - && - (return_stmt_var = return_stmt_rule(p)) // return_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'return' return_stmt")); - _res = return_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'return' return_stmt")); - } - { // &('import' | 'from') import_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); - stmt_ty import_stmt_var; - if ( - _PyPegen_lookahead(1, _tmp_14_rule, p) - && - (import_stmt_var = import_stmt_rule(p)) // import_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('import' | 'from') import_stmt")); - _res = import_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('import' | 'from') import_stmt")); - } - { // &'raise' raise_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); - stmt_ty raise_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 501) // token='raise' - && - (raise_stmt_var = raise_stmt_rule(p)) // raise_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'raise' raise_stmt")); - _res = raise_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'raise' raise_stmt")); - } - { // 'pass' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'pass'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 502)) // token='pass' - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'pass'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Pass ( EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'pass'")); - } - { // &'del' del_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); - stmt_ty del_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 503) // token='del' - && - (del_stmt_var = del_stmt_rule(p)) // del_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'del' del_stmt")); - _res = del_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'del' del_stmt")); - } - { // &'yield' yield_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); - stmt_ty yield_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 504) // token='yield' - && - (yield_stmt_var = yield_stmt_rule(p)) // yield_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'yield' yield_stmt")); - _res = yield_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'yield' yield_stmt")); - } - { // &'assert' assert_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); - stmt_ty assert_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 505) // token='assert' - && - (assert_stmt_var = assert_stmt_rule(p)) // assert_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'assert' assert_stmt")); - _res = assert_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'assert' assert_stmt")); - } - { // 'break' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'break'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 506)) // token='break' - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'break'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Break ( EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'break'")); - } - { // 'continue' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'continue'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 507)) // token='continue' - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'continue'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Continue ( EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'continue'")); - } - { // &'global' global_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); - stmt_ty global_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 508) // token='global' - && - (global_stmt_var = global_stmt_rule(p)) // global_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'global' global_stmt")); - _res = global_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'global' global_stmt")); - } - { // &'nonlocal' nonlocal_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> small_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); - stmt_ty nonlocal_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 509) // token='nonlocal' - && - (nonlocal_stmt_var = nonlocal_stmt_rule(p)) // nonlocal_stmt - ) - { - D(fprintf(stderr, "%*c+ small_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'nonlocal' nonlocal_stmt")); - _res = nonlocal_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s small_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'nonlocal' nonlocal_stmt")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, small_stmt_type, _res); - D(p->level--); - return _res; -} - -// compound_stmt: -// | &('def' | '@' | ASYNC) function_def -// | &'if' if_stmt -// | &('class' | '@') class_def -// | &('with' | ASYNC) with_stmt -// | &('for' | ASYNC) for_stmt -// | &'try' try_stmt -// | &'while' while_stmt -static stmt_ty -compound_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - { // &('def' | '@' | ASYNC) function_def - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); - stmt_ty function_def_var; - if ( - _PyPegen_lookahead(1, _tmp_15_rule, p) - && - (function_def_var = function_def_rule(p)) // function_def - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); - _res = function_def_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('def' | '@' | ASYNC) function_def")); - } - { // &'if' if_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); - stmt_ty if_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 510) // token='if' - && - (if_stmt_var = if_stmt_rule(p)) // if_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'if' if_stmt")); - _res = if_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'if' if_stmt")); - } - { // &('class' | '@') class_def - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); - stmt_ty class_def_var; - if ( - _PyPegen_lookahead(1, _tmp_16_rule, p) - && - (class_def_var = class_def_rule(p)) // class_def - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('class' | '@') class_def")); - _res = class_def_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('class' | '@') class_def")); - } - { // &('with' | ASYNC) with_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); - stmt_ty with_stmt_var; - if ( - _PyPegen_lookahead(1, _tmp_17_rule, p) - && - (with_stmt_var = with_stmt_rule(p)) // with_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('with' | ASYNC) with_stmt")); - _res = with_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('with' | ASYNC) with_stmt")); - } - { // &('for' | ASYNC) for_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); - stmt_ty for_stmt_var; - if ( - _PyPegen_lookahead(1, _tmp_18_rule, p) - && - (for_stmt_var = for_stmt_rule(p)) // for_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&('for' | ASYNC) for_stmt")); - _res = for_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&('for' | ASYNC) for_stmt")); - } - { // &'try' try_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); - stmt_ty try_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 511) // token='try' - && - (try_stmt_var = try_stmt_rule(p)) // try_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'try' try_stmt")); - _res = try_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'try' try_stmt")); - } - { // &'while' while_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compound_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); - stmt_ty while_stmt_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 512) // token='while' - && - (while_stmt_var = while_stmt_rule(p)) // while_stmt - ) - { - D(fprintf(stderr, "%*c+ compound_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'while' while_stmt")); - _res = while_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compound_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "&'while' while_stmt")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// assignment: -// | NAME ':' expression ['=' annotated_rhs] -// | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] -// | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? -// | single_target augassign (yield_expr | star_expressions) -// | invalid_assignment -static stmt_ty -assignment_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME ':' expression ['=' annotated_rhs] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); - Token * _literal; - expr_ty a; - expr_ty b; - void *c; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - && - (c = _tmp_19_rule(p), 1) // ['=' annotated_rhs] - ) - { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 6 , "Variable annotation syntax is" , _Py_AnnAssign ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , c , 1 , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME ':' expression ['=' annotated_rhs]")); - } - { // ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); - Token * _literal; - void *a; - expr_ty b; - void *c; - if ( - (a = _tmp_20_rule(p)) // '(' single_target ')' | single_subscript_attribute_target - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - && - (c = _tmp_21_rule(p), 1) // ['=' annotated_rhs] - ) - { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 6 , "Variable annotations syntax is" , _Py_AnnAssign ( a , b , c , 0 , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs]")); - } - { // ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); - asdl_seq * a; - void *b; - void *tc; - if ( - (a = _loop1_22_rule(p)) // ((star_targets '='))+ - && - (b = _tmp_23_rule(p)) // yield_expr | star_expressions - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 22) // token='=' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - ) - { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Assign ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); - } - { // single_target augassign (yield_expr | star_expressions) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); - expr_ty a; - AugOperator* b; - void *c; - if ( - (a = single_target_rule(p)) // single_target - && - (b = augassign_rule(p)) // augassign - && - (c = _tmp_24_rule(p)) // yield_expr | star_expressions - ) - { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_AugAssign ( a , b -> kind , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); - } - { // invalid_assignment - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); - void *invalid_assignment_var; - if ( - (invalid_assignment_var = invalid_assignment_rule(p)) // invalid_assignment - ) - { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_assignment")); - _res = invalid_assignment_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_assignment")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// augassign: -// | '+=' -// | '-=' -// | '*=' -// | '@=' -// | '/=' -// | '%=' -// | '&=' -// | '|=' -// | '^=' -// | '<<=' -// | '>>=' -// | '**=' -// | '//=' -static AugOperator* -augassign_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - AugOperator* _res = NULL; - int _mark = p->mark; - { // '+=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 36)) // token='+=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+='")); - _res = _PyPegen_augoperator ( p , Add ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'+='")); - } - { // '-=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 37)) // token='-=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-='")); - _res = _PyPegen_augoperator ( p , Sub ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'-='")); - } - { // '*=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 38)) // token='*=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*='")); - _res = _PyPegen_augoperator ( p , Mult ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*='")); - } - { // '@=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 50)) // token='@=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@='")); - _res = CHECK_VERSION ( 5 , "The '@' operator is" , _PyPegen_augoperator ( p , MatMult ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@='")); - } - { // '/=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'/='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 39)) // token='/=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'/='")); - _res = _PyPegen_augoperator ( p , Div ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'/='")); - } - { // '%=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'%='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 40)) // token='%=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'%='")); - _res = _PyPegen_augoperator ( p , Mod ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'%='")); - } - { // '&=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'&='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 41)) // token='&=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'&='")); - _res = _PyPegen_augoperator ( p , BitAnd ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'&='")); - } - { // '|=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'|='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 42)) // token='|=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'|='")); - _res = _PyPegen_augoperator ( p , BitOr ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'|='")); - } - { // '^=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'^='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 43)) // token='^=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'^='")); - _res = _PyPegen_augoperator ( p , BitXor ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'^='")); - } - { // '<<=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<<='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 44)) // token='<<=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<<='")); - _res = _PyPegen_augoperator ( p , LShift ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<<='")); - } - { // '>>=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>>='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 45)) // token='>>=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>>='")); - _res = _PyPegen_augoperator ( p , RShift ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>>='")); - } - { // '**=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 46)) // token='**=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**='")); - _res = _PyPegen_augoperator ( p , Pow ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**='")); - } - { // '//=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> augassign[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'//='")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 48)) // token='//=' - ) - { - D(fprintf(stderr, "%*c+ augassign[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'//='")); - _res = _PyPegen_augoperator ( p , FloorDiv ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s augassign[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'//='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// global_stmt: 'global' ','.NAME+ -static stmt_ty -global_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'global' ','.NAME+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> global_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); - Token * _keyword; - asdl_seq * a; - if ( - (_keyword = _PyPegen_expect_token(p, 508)) // token='global' - && - (a = _gather_25_rule(p)) // ','.NAME+ - ) - { - D(fprintf(stderr, "%*c+ global_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'global' ','.NAME+")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Global ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s global_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'global' ','.NAME+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// nonlocal_stmt: 'nonlocal' ','.NAME+ -static stmt_ty -nonlocal_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'nonlocal' ','.NAME+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> nonlocal_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); - Token * _keyword; - asdl_seq * a; - if ( - (_keyword = _PyPegen_expect_token(p, 509)) // token='nonlocal' - && - (a = _gather_27_rule(p)) // ','.NAME+ - ) - { - D(fprintf(stderr, "%*c+ nonlocal_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'nonlocal' ','.NAME+")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Nonlocal ( CHECK ( _PyPegen_map_names_to_ids ( p , a ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s nonlocal_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'nonlocal' ','.NAME+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// yield_stmt: yield_expr -static stmt_ty -yield_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> yield_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty y; - if ( - (y = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ yield_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Expr ( y , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s yield_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// assert_stmt: 'assert' expression [',' expression] -static stmt_ty -assert_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'assert' expression [',' expression] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> assert_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); - Token * _keyword; - expr_ty a; - void *b; - if ( - (_keyword = _PyPegen_expect_token(p, 505)) // token='assert' - && - (a = expression_rule(p)) // expression - && - (b = _tmp_29_rule(p), 1) // [',' expression] - ) - { - D(fprintf(stderr, "%*c+ assert_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'assert' expression [',' expression]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Assert ( a , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s assert_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'assert' expression [',' expression]")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// del_stmt: 'del' del_targets -static stmt_ty -del_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'del' del_targets - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); - Token * _keyword; - asdl_seq* a; - if ( - (_keyword = _PyPegen_expect_token(p, 503)) // token='del' - && - (a = del_targets_rule(p)) // del_targets - ) - { - D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Delete ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'del' del_targets")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_stmt: import_name | import_from -static stmt_ty -import_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - { // import_name - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_name")); - stmt_ty import_name_var; - if ( - (import_name_var = import_name_rule(p)) // import_name - ) - { - D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_name")); - _res = import_name_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_name")); - } - { // import_from - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from")); - stmt_ty import_from_var; - if ( - (import_from_var = import_from_rule(p)) // import_from - ) - { - D(fprintf(stderr, "%*c+ import_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from")); - _res = import_from_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_name: 'import' dotted_as_names -static stmt_ty -import_name_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'import' dotted_as_names - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); - Token * _keyword; - asdl_seq* a; - if ( - (_keyword = _PyPegen_expect_token(p, 513)) // token='import' - && - (a = dotted_as_names_rule(p)) // dotted_as_names - ) - { - D(fprintf(stderr, "%*c+ import_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import' dotted_as_names")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Import ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_name[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import' dotted_as_names")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_from: -// | 'from' (('.' | '...'))* dotted_name 'import' import_from_targets -// | 'from' (('.' | '...'))+ 'import' import_from_targets -static stmt_ty -import_from_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'from' (('.' | '...'))* dotted_name 'import' import_from_targets - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); - Token * _keyword; - Token * _keyword_1; - asdl_seq * a; - expr_ty b; - asdl_seq* c; - if ( - (_keyword = _PyPegen_expect_token(p, 514)) // token='from' - && - (a = _loop0_30_rule(p)) // (('.' | '...'))* - && - (b = dotted_name_rule(p)) // dotted_name - && - (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' - && - (c = import_from_targets_rule(p)) // import_from_targets - ) - { - D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' 
| '...'))* dotted_name 'import' import_from_targets")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ImportFrom ( b -> v . Name . id , c , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))* dotted_name 'import' import_from_targets")); - } - { // 'from' (('.' | '...'))+ 'import' import_from_targets - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); - Token * _keyword; - Token * _keyword_1; - asdl_seq * a; - asdl_seq* b; - if ( - (_keyword = _PyPegen_expect_token(p, 514)) // token='from' - && - (a = _loop1_31_rule(p)) // (('.' | '...'))+ - && - (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' - && - (b = import_from_targets_rule(p)) // import_from_targets - ) - { - D(fprintf(stderr, "%*c+ import_from[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ImportFrom ( NULL , b , _PyPegen_seq_count_dots ( a ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' (('.' | '...'))+ 'import' import_from_targets")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_from_targets: -// | '(' import_from_as_names ','? ')' -// | import_from_as_names !',' -// | '*' -// | invalid_import_from_targets -static asdl_seq* -import_from_targets_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // '(' import_from_as_names ','? ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? ')'")); - Token * _literal; - Token * _literal_1; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq* a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = import_from_as_names_rule(p)) // import_from_as_names - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' import_from_as_names ','? 
')'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' import_from_as_names ','? ')'")); - } - { // import_from_as_names !',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); - asdl_seq* import_from_as_names_var; - if ( - (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' - ) - { - D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names !','")); - _res = import_from_as_names_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_names !','")); - } - { // '*' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - ) - { - D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*'")); - _res = _PyPegen_singleton_seq ( p , CHECK ( _PyPegen_alias_for_star ( p ) ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*'")); - } - { // invalid_import_from_targets - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); - void *invalid_import_from_targets_var; - if ( - (invalid_import_from_targets_var = invalid_import_from_targets_rule(p)) // invalid_import_from_targets - ) - { - D(fprintf(stderr, "%*c+ import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_import_from_targets")); - _res = invalid_import_from_targets_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_import_from_targets")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_from_as_names: ','.import_from_as_name+ -static asdl_seq* -import_from_as_names_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.import_from_as_name+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); - asdl_seq * a; - if ( - (a = _gather_32_rule(p)) // ','.import_from_as_name+ - ) - { - D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_as_names[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.import_from_as_name+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// import_from_as_name: NAME ['as' NAME] -static alias_ty -import_from_as_name_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - alias_ty _res = NULL; - int _mark = p->mark; - { // NAME ['as' NAME] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> import_from_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); - expr_ty a; - void *b; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (b = _tmp_34_rule(p), 1) // ['as' NAME] - ) - { - D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); - _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s import_from_as_name[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ['as' NAME]")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// dotted_as_names: ','.dotted_as_name+ -static asdl_seq* -dotted_as_names_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.dotted_as_name+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); - asdl_seq * a; - if ( - (a = _gather_35_rule(p)) // ','.dotted_as_name+ - ) - { - D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dotted_as_names[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.dotted_as_name+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// dotted_as_name: dotted_name ['as' NAME] -static alias_ty -dotted_as_name_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - alias_ty _res = NULL; - int _mark = p->mark; - { // dotted_name ['as' NAME] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dotted_as_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); - expr_ty a; - void *b; - if ( - (a = dotted_name_rule(p)) // dotted_name - && - (b = _tmp_37_rule(p), 1) // ['as' NAME] - ) - { - D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); - _res = _Py_alias ( a -> v . Name . id , ( b ) ? ( ( expr_ty ) b ) -> v . Name . id : NULL , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dotted_as_name[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name ['as' NAME]")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// dotted_name: dotted_name '.' NAME | NAME -static expr_ty dotted_name_raw(Parser *); -static expr_ty -dotted_name_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, dotted_name_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_0 = _PyPegen_update_memo(p, _mark, dotted_name_type, _res); - if (tmpvar_0) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = dotted_name_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -dotted_name_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // dotted_name '.' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = dotted_name_rule(p)) // dotted_name - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name '.' NAME")); - _res = _PyPegen_join_names_with_dot ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_name '.' NAME")); - } - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dotted_name[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty name_var; - if ( - (name_var = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ dotted_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - _res = name_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dotted_name[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// if_stmt: -// | 'if' named_expression ':' block elif_stmt -// | 'if' named_expression ':' block else_block? -static stmt_ty -if_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'if' named_expression ':' block elif_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); - Token * _keyword; - Token * _literal; - expr_ty a; - asdl_seq* b; - stmt_ty c; - if ( - (_keyword = _PyPegen_expect_token(p, 510)) // token='if' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = elif_stmt_rule(p)) // elif_stmt - ) - { - D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block elif_stmt")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s if_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' named_expression ':' block elif_stmt")); - } - { // 'if' named_expression ':' block else_block? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> if_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); - Token * _keyword; - Token * _literal; - expr_ty a; - asdl_seq* b; - void *c; - if ( - (_keyword = _PyPegen_expect_token(p, 510)) // token='if' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), 1) // else_block? - ) - { - D(fprintf(stderr, "%*c+ if_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' named_expression ':' block else_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_If ( a , b , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s if_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' named_expression ':' block else_block?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// elif_stmt: -// | 'elif' named_expression ':' block elif_stmt -// | 'elif' named_expression ':' block else_block? -static stmt_ty -elif_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'elif' named_expression ':' block elif_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); - Token * _keyword; - Token * _literal; - expr_ty a; - asdl_seq* b; - stmt_ty c; - if ( - (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = elif_stmt_rule(p)) // elif_stmt - ) - { - D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_If ( a , b , CHECK ( _PyPegen_singleton_seq ( p , c ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block elif_stmt")); - } - { // 'elif' named_expression ':' block else_block? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> elif_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); - Token * _keyword; - Token * _literal; - expr_ty a; - asdl_seq* b; - void *c; - if ( - (_keyword = _PyPegen_expect_token(p, 515)) // token='elif' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), 1) // else_block? 
- ) - { - D(fprintf(stderr, "%*c+ elif_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'elif' named_expression ':' block else_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_If ( a , b , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s elif_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'elif' named_expression ':' block else_block?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// else_block: 'else' ':' block -static asdl_seq* -else_block_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // 'else' ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> else_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); - Token * _keyword; - Token * _literal; - asdl_seq* b; - if ( - (_keyword = _PyPegen_expect_token(p, 516)) // token='else' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ else_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'else' ':' block")); - _res = b; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s else_block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'else' ':' block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// while_stmt: 'while' named_expression ':' block else_block? -static stmt_ty -while_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'while' named_expression ':' block else_block? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> while_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); - Token * _keyword; - Token * _literal; - expr_ty a; - asdl_seq* b; - void *c; - if ( - (_keyword = _PyPegen_expect_token(p, 512)) // token='while' - && - (a = named_expression_rule(p)) // named_expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (c = else_block_rule(p), 1) // else_block? 
- ) - { - D(fprintf(stderr, "%*c+ while_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'while' named_expression ':' block else_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_While ( a , b , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s while_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'while' named_expression ':' block else_block?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// for_stmt: -// | 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? -// | ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? -static stmt_ty -for_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - Token * _keyword; - Token * _keyword_1; - Token * _literal; - asdl_seq* b; - void *el; - expr_ty ex; - expr_ty t; - void *tc; - if ( - (_keyword = _PyPegen_expect_token(p, 517)) // token='for' - && - (t = star_targets_rule(p)) // star_targets - && - (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' - && - (ex = star_expressions_rule(p)) // star_expressions - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - && - (el = else_block_rule(p), 1) // else_block? - ) - { - D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_For ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - } - { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? 
- if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - Token * _keyword; - Token * _keyword_1; - Token * _literal; - Token * async_var; - asdl_seq* b; - void *el; - expr_ty ex; - expr_ty t; - void *tc; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = _PyPegen_expect_token(p, 517)) // token='for' - && - (t = star_targets_rule(p)) // star_targets - && - (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' - && - (ex = star_expressions_rule(p)) // star_expressions - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - && - (el = else_block_rule(p), 1) // else_block? - ) - { - D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "Async for loops are" , _Py_AsyncFor ( t , ex , b , el , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// with_stmt: -// | 'with' '(' ','.with_item+ ','? ')' ':' block -// | 'with' ','.with_item+ ':' TYPE_COMMENT? block -// | ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block -// | ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block -static stmt_ty -with_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'with' '(' ','.with_item+ ','? ')' ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); - Token * _keyword; - Token * _literal; - Token * _literal_1; - Token * _literal_2; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - asdl_seq* b; - if ( - (_keyword = _PyPegen_expect_token(p, 519)) // token='with' - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = _gather_38_rule(p)) // ','.with_item+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
- && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_With ( a , b , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' '(' ','.with_item+ ','? ')' ':' block")); - } - { // 'with' ','.with_item+ ':' TYPE_COMMENT? block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); - Token * _keyword; - Token * _literal; - asdl_seq * a; - asdl_seq* b; - void *tc; - if ( - (_keyword = _PyPegen_expect_token(p, 519)) // token='with' - && - (a = _gather_40_rule(p)) // ','.with_item+ - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_With ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'with' ','.with_item+ ':' TYPE_COMMENT? block")); - } - { // ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); - Token * _keyword; - Token * _literal; - Token * _literal_1; - Token * _literal_2; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - Token * async_var; - asdl_seq* b; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = _PyPegen_expect_token(p, 519)) // token='with' - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = _gather_42_rule(p)) // ','.with_item+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? 
')' ':' block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NULL , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' '(' ','.with_item+ ','? ')' ':' block")); - } - { // ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> with_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); - Token * _keyword; - Token * _literal; - asdl_seq * a; - Token * async_var; - asdl_seq* b; - void *tc; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = _PyPegen_expect_token(p, 519)) // token='with' - && - (a = _gather_44_rule(p)) // ','.with_item+ - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ with_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "Async with statements are" , _Py_AsyncWith ( a , b , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s with_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'with' ','.with_item+ ':' TYPE_COMMENT? block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// with_item: expression ['as' target] -static withitem_ty -with_item_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - withitem_ty _res = NULL; - int _mark = p->mark; - { // expression ['as' target] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); - expr_ty e; - void *o; - if ( - (e = expression_rule(p)) // expression - && - (o = _tmp_46_rule(p), 1) // ['as' target] - ) - { - D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); - _res = _Py_withitem ( e , o , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ['as' target]")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// try_stmt: -// | 'try' ':' block finally_block -// | 'try' ':' block except_block+ else_block? finally_block? -static stmt_ty -try_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'try' ':' block finally_block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); - Token * _keyword; - Token * _literal; - asdl_seq* b; - asdl_seq* f; - if ( - (_keyword = _PyPegen_expect_token(p, 511)) // token='try' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (f = finally_block_rule(p)) // finally_block - ) - { - D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block finally_block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Try ( b , NULL , NULL , f , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'try' ':' block finally_block")); - } - { // 'try' ':' block except_block+ else_block? finally_block? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> try_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); - Token * _keyword; - Token * _literal; - asdl_seq* b; - void *el; - asdl_seq * ex; - void *f; - if ( - (_keyword = _PyPegen_expect_token(p, 511)) // token='try' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - && - (ex = _loop1_47_rule(p)) // except_block+ - && - (el = else_block_rule(p), 1) // else_block? - && - (f = finally_block_rule(p), 1) // finally_block? - ) - { - D(fprintf(stderr, "%*c+ try_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Try ( b , ex , el , f , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s try_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'try' ':' block except_block+ else_block? finally_block?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// except_block: 'except' expression ['as' NAME] ':' block | 'except' ':' block -static excepthandler_ty -except_block_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - excepthandler_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'except' expression ['as' NAME] ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); - Token * _keyword; - Token * _literal; - asdl_seq* b; - expr_ty e; - void *t; - if ( - (_keyword = _PyPegen_expect_token(p, 520)) // token='except' - && - (e = expression_rule(p)) // expression - && - (t = _tmp_48_rule(p), 1) // ['as' NAME] - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' expression ['as' NAME] ':' block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ExceptHandler ( e , ( t ) ? ( ( expr_ty ) t ) -> v . Name . id : NULL , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'except' expression ['as' NAME] ':' block")); - } - { // 'except' ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> except_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); - Token * _keyword; - Token * _literal; - asdl_seq* b; - if ( - (_keyword = _PyPegen_expect_token(p, 520)) // token='except' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ except_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'except' ':' block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ExceptHandler ( NULL , NULL , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s except_block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'except' ':' block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// finally_block: 'finally' ':' block -static asdl_seq* -finally_block_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // 'finally' ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> finally_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); - Token * _keyword; - Token * _literal; - asdl_seq* a; - if ( - (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (a = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ finally_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'finally' ':' block")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s finally_block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'finally' ':' block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// return_stmt: 'return' star_expressions? -static stmt_ty -return_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'return' star_expressions? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> return_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); - Token * _keyword; - void *a; - if ( - (_keyword = _PyPegen_expect_token(p, 500)) // token='return' - && - (a = star_expressions_rule(p), 1) // star_expressions? - ) - { - D(fprintf(stderr, "%*c+ return_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'return' star_expressions?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Return ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s return_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'return' star_expressions?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// raise_stmt: 'raise' expression ['from' expression] | 'raise' -static stmt_ty -raise_stmt_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'raise' expression ['from' expression] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); - Token * _keyword; - expr_ty a; - void *b; - if ( - (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' - && - (a = expression_rule(p)) // expression - && - (b = _tmp_49_rule(p), 1) // ['from' expression] - ) - { - D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Raise ( a , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'raise' expression ['from' expression]")); - } - { // 'raise' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> raise_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'raise'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 501)) // token='raise' - ) - { - D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Raise ( NULL , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s raise_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'raise'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// function_def: decorators function_def_raw | function_def_raw -static stmt_ty -function_def_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - { // decorators function_def_raw - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); - asdl_seq* d; - stmt_ty f; - if ( - (d = decorators_rule(p)) // decorators - && - (f = function_def_raw_rule(p)) // function_def_raw - ) - { - D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators function_def_raw")); - _res = _PyPegen_function_def_decorators ( p , d , f ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators function_def_raw")); - } - { // function_def_raw - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> function_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "function_def_raw")); - stmt_ty function_def_raw_var; - if ( - (function_def_raw_var = function_def_raw_rule(p)) // function_def_raw - ) - { - D(fprintf(stderr, "%*c+ function_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "function_def_raw")); - _res = function_def_raw_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s function_def[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "function_def_raw")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// function_def_raw: -// | 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block -// | ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block -static stmt_ty -function_def_raw_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); - Token * _keyword; - Token * _literal; - Token * _literal_1; - Token * _literal_2; - void *a; - asdl_seq* b; - expr_ty n; - void *params; - void *tc; - if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' - && - (n = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (params = params_rule(p), 1) // params? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - (a = _tmp_50_rule(p), 1) // ['->' expression] - && - (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = func_type_comment_rule(p), 1) // func_type_comment? 
- && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_FunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); - } - { // ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> function_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); - Token * _keyword; - Token * _literal; - Token * _literal_1; - Token * _literal_2; - void *a; - Token * async_var; - asdl_seq* b; - expr_ty n; - void *params; - void *tc; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' - && - (n = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (params = params_rule(p), 1) // params? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - (a = _tmp_51_rule(p), 1) // ['->' expression] - && - (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' - && - (tc = func_type_comment_rule(p), 1) // func_type_comment? - && - (b = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ function_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "Async functions are" , _Py_AsyncFunctionDef ( n -> v . Name . id , ( params ) ? params : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , NULL , a , NEW_TYPE_COMMENT ( p , tc ) , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s function_def_raw[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'def' NAME '(' params? ')' ['->' expression] ':' func_type_comment? 
block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// func_type_comment: -// | NEWLINE TYPE_COMMENT &(NEWLINE INDENT) -// | invalid_double_type_comments -// | TYPE_COMMENT -static Token* -func_type_comment_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - Token* _res = NULL; - int _mark = p->mark; - { // NEWLINE TYPE_COMMENT &(NEWLINE INDENT) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); - Token * newline_var; - Token * t; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - _PyPegen_lookahead(1, _tmp_52_rule, p) - ) - { - D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); - _res = t; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); - } - { // invalid_double_type_comments - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); - void *invalid_double_type_comments_var; - if ( - (invalid_double_type_comments_var = invalid_double_type_comments_rule(p)) // invalid_double_type_comments - ) - { - D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_double_type_comments")); - _res = invalid_double_type_comments_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_double_type_comments")); - } - { // TYPE_COMMENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> func_type_comment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); - Token * type_comment_var; - if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - ) - { - D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT")); - _res = type_comment_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s func_type_comment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "TYPE_COMMENT")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// params: invalid_parameters | parameters -static arguments_ty -params_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arguments_ty _res = NULL; - int _mark = p->mark; - { // invalid_parameters - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); - void *invalid_parameters_var; - if ( - (invalid_parameters_var = invalid_parameters_rule(p)) // invalid_parameters - ) - { - D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_parameters")); - _res = invalid_parameters_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_parameters")); - } - { // parameters - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "parameters")); - arguments_ty parameters_var; - if ( - (parameters_var = parameters_rule(p)) // parameters - ) - { - D(fprintf(stderr, "%*c+ params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "parameters")); - _res = parameters_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s params[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "parameters")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// parameters: -// | slash_no_default param_no_default* param_with_default* star_etc? -// | slash_with_default param_with_default* star_etc? -// | param_no_default+ param_with_default* star_etc? -// | param_with_default+ star_etc? -// | star_etc -static arguments_ty -parameters_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arguments_ty _res = NULL; - int _mark = p->mark; - { // slash_no_default param_no_default* param_with_default* star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); - asdl_seq* a; - asdl_seq * b; - asdl_seq * c; - void *d; - if ( - (a = slash_no_default_rule(p)) // slash_no_default - && - (b = _loop0_53_rule(p)) // param_no_default* - && - (c = _loop0_54_rule(p)) // param_with_default* - && - (d = star_etc_rule(p), 1) // star_etc? - ) - { - D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); - _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_no_default param_no_default* param_with_default* star_etc?")); - } - { // slash_with_default param_with_default* star_etc? 
- if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); - SlashWithDefault* a; - asdl_seq * b; - void *c; - if ( - (a = slash_with_default_rule(p)) // slash_with_default - && - (b = _loop0_55_rule(p)) // param_with_default* - && - (c = star_etc_rule(p), 1) // star_etc? - ) - { - D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default param_with_default* star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default param_with_default* star_etc?")); - } - { // param_no_default+ param_with_default* star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); - asdl_seq * a; - asdl_seq * b; - void *c; - if ( - (a = _loop1_56_rule(p)) // param_no_default+ - && - (b = _loop0_57_rule(p)) // param_with_default* - && - (c = star_etc_rule(p), 1) // star_etc? - ) - { - D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ param_with_default* star_etc?")); - } - { // param_with_default+ star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); - asdl_seq * a; - void *b; - if ( - (a = _loop1_58_rule(p)) // param_with_default+ - && - (b = star_etc_rule(p), 1) // star_etc? - ) - { - D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+ star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+ star_etc?")); - } - { // star_etc - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_etc")); - StarEtc* a; - if ( - (a = star_etc_rule(p)) // star_etc - ) - { - D(fprintf(stderr, "%*c+ parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_etc")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_etc")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// slash_no_default: param_no_default+ '/' ',' | param_no_default+ '/' &')' -static asdl_seq* -slash_no_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // param_no_default+ '/' ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - if ( - (a = _loop1_59_rule(p)) // param_no_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' ','")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default+ '/' ','")); - } - { // param_no_default+ '/' &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); - Token * _literal; - asdl_seq * a; - if ( - (a = _loop1_60_rule(p)) // param_no_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default+ '/' &')'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default+ '/' &')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// slash_with_default: -// | param_no_default* param_with_default+ '/' ',' -// | param_no_default* param_with_default+ '/' &')' -static SlashWithDefault* -slash_with_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - SlashWithDefault* _res = NULL; - int _mark = p->mark; - { // param_no_default* param_with_default+ '/' ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - asdl_seq * b; - if ( - (a = _loop0_61_rule(p)) // param_no_default* - && - (b = _loop1_62_rule(p)) // param_with_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); - _res = _PyPegen_slash_with_default ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* param_with_default+ '/' ','")); - } - { // param_no_default* param_with_default+ '/' &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); - Token * _literal; - asdl_seq * a; - asdl_seq * b; - if ( - (a = _loop0_63_rule(p)) // param_no_default* - && - (b = _loop1_64_rule(p)) // param_with_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); - _res = _PyPegen_slash_with_default ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* param_with_default+ '/' &')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_etc: -// | '*' param_no_default param_maybe_default* kwds? -// | '*' ',' param_maybe_default+ kwds? -// | kwds -// | invalid_star_etc -static StarEtc* -star_etc_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - StarEtc* _res = NULL; - int _mark = p->mark; - { // '*' param_no_default param_maybe_default* kwds? 
- if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); - Token * _literal; - arg_ty a; - asdl_seq * b; - void *c; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = param_no_default_rule(p)) // param_no_default - && - (b = _loop0_65_rule(p)) // param_maybe_default* - && - (c = kwds_rule(p), 1) // kwds? - ) - { - D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); - _res = _PyPegen_star_etc ( p , a , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' param_no_default param_maybe_default* kwds?")); - } - { // '*' ',' param_maybe_default+ kwds? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); - Token * _literal; - Token * _literal_1; - asdl_seq * b; - void *c; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - && - (b = _loop1_66_rule(p)) // param_maybe_default+ - && - (c = kwds_rule(p), 1) // kwds? - ) - { - D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); - _res = _PyPegen_star_etc ( p , NULL , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' param_maybe_default+ kwds?")); - } - { // kwds - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwds")); - arg_ty a; - if ( - (a = kwds_rule(p)) // kwds - ) - { - D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwds")); - _res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwds")); - } - { // invalid_star_etc - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); - void *invalid_star_etc_var; - if ( - (invalid_star_etc_var = invalid_star_etc_rule(p)) // invalid_star_etc - ) - { - D(fprintf(stderr, "%*c+ star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_star_etc")); - _res = invalid_star_etc_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_star_etc")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// kwds: '**' param_no_default -static arg_ty -kwds_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - { // '**' param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); - Token * _literal; - arg_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (a = param_no_default_rule(p)) // param_no_default - ) - { - D(fprintf(stderr, "%*c+ kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' param_no_default")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwds[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' param_no_default")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// param_no_default: param ',' TYPE_COMMENT? | param TYPE_COMMENT? &')' -static arg_ty -param_no_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - { // param ',' TYPE_COMMENT? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); - Token * _literal; - arg_ty a; - void *tc; - if ( - (a = param_rule(p)) // param - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - ) - { - D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param ',' TYPE_COMMENT?")); - _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param ',' TYPE_COMMENT?")); - } - { // param TYPE_COMMENT? &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); - arg_ty a; - void *tc; - if ( - (a = param_rule(p)) // param - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param TYPE_COMMENT? &')'")); - _res = _PyPegen_add_type_comment_to_arg ( p , a , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param TYPE_COMMENT? &')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// param_with_default: param default ',' TYPE_COMMENT? | param default TYPE_COMMENT? 
&')' -static NameDefaultPair* -param_with_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - NameDefaultPair* _res = NULL; - int _mark = p->mark; - { // param default ',' TYPE_COMMENT? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); - Token * _literal; - arg_ty a; - expr_ty c; - void *tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p)) // default - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - ) - { - D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default ',' TYPE_COMMENT?")); - _res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default ',' TYPE_COMMENT?")); - } - { // param default TYPE_COMMENT? &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); - arg_ty a; - expr_ty c; - void *tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p)) // default - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default TYPE_COMMENT? &')'")); - _res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default TYPE_COMMENT? &')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// param_maybe_default: -// | param default? ',' TYPE_COMMENT? -// | param default? TYPE_COMMENT? &')' -static NameDefaultPair* -param_maybe_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - NameDefaultPair* _res = NULL; - int _mark = p->mark; - { // param default? ',' TYPE_COMMENT? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? ',' TYPE_COMMENT?")); - Token * _literal; - arg_ty a; - void *c; - void *tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p), 1) // default? - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - ) - { - D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? 
',' TYPE_COMMENT?")); - _res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? ',' TYPE_COMMENT?")); - } - { // param default? TYPE_COMMENT? &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? &')'")); - arg_ty a; - void *c; - void *tc; - if ( - (a = param_rule(p)) // param - && - (c = default_rule(p), 1) // default? - && - (tc = _PyPegen_expect_token(p, TYPE_COMMENT), 1) // TYPE_COMMENT? - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param default? TYPE_COMMENT? &')'")); - _res = _PyPegen_name_default_pair ( p , a , c , tc ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param default? TYPE_COMMENT? &')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// param: NAME annotation? -static arg_ty -param_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME annotation? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); - expr_ty a; - void *b; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (b = annotation_rule(p), 1) // annotation? - ) - { - D(fprintf(stderr, "%*c+ param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME annotation?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_arg ( a -> v . Name . id , b , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s param[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME annotation?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// annotation: ':' expression -static expr_ty -annotation_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // ':' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> annotation[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ annotation[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s annotation[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// default: '=' expression -static expr_ty -default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // '=' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' expression")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// decorators: (('@' named_expression NEWLINE))+ -static asdl_seq* -decorators_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // (('@' named_expression NEWLINE))+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); - asdl_seq * a; - if ( - (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ - ) - { - D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s decorators[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(('@' named_expression NEWLINE))+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// class_def: decorators class_def_raw | class_def_raw -static stmt_ty -class_def_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - { // decorators class_def_raw - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); - asdl_seq* a; - stmt_ty b; - if ( - (a = decorators_rule(p)) // decorators - && - (b = class_def_raw_rule(p)) // class_def_raw - ) - { - D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "decorators class_def_raw")); - _res = _PyPegen_class_def_decorators ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "decorators class_def_raw")); - } - { // class_def_raw - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> class_def[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "class_def_raw")); - stmt_ty class_def_raw_var; - if ( - (class_def_raw_var = class_def_raw_rule(p)) // class_def_raw - ) - { - D(fprintf(stderr, "%*c+ class_def[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "class_def_raw")); - _res = class_def_raw_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s class_def[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "class_def_raw")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// class_def_raw: 'class' NAME ['(' arguments? ')'] ':' block -static stmt_ty -class_def_raw_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - stmt_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'class' NAME ['(' arguments? ')'] ':' block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> class_def_raw[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); - Token * _keyword; - Token * _literal; - expr_ty a; - void *b; - asdl_seq* c; - if ( - (_keyword = _PyPegen_expect_token(p, 523)) // token='class' - && - (a = _PyPegen_name_token(p)) // NAME - && - (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (c = block_rule(p)) // block - ) - { - D(fprintf(stderr, "%*c+ class_def_raw[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ClassDef ( a -> v . Name . id , ( b ) ? 
( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , c , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s class_def_raw[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class' NAME ['(' arguments? ')'] ':' block")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// block: NEWLINE INDENT statements DEDENT | simple_stmt | invalid_block -static asdl_seq* -block_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - if (_PyPegen_is_memoized(p, block_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - { // NEWLINE INDENT statements DEDENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); - asdl_seq* a; - Token * dedent_var; - Token * indent_var; - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' - && - (a = statements_rule(p)) // statements - && - (dedent_var = _PyPegen_expect_token(p, DEDENT)) // token='DEDENT' - ) - { - D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT statements DEDENT")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT statements DEDENT")); - } - { // simple_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - asdl_seq* simple_stmt_var; - if ( - (simple_stmt_var = simple_stmt_rule(p)) // simple_stmt - ) - { - D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "simple_stmt")); - _res = simple_stmt_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "simple_stmt")); - } - { // invalid_block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_block")); - void *invalid_block_var; - if ( - (invalid_block_var = invalid_block_rule(p)) // invalid_block - ) - { - D(fprintf(stderr, "%*c+ block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_block")); - _res = invalid_block_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_block")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, block_type, _res); - D(p->level--); - return _res; -} - -// expressions_list: ','.star_expression+ ','? -static asdl_seq* -expressions_list_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.star_expression+ ','? 
- if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expressions_list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_69_rule(p)) // ','.star_expression+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ expressions_list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_expression+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expressions_list[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_expression+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_expressions: -// | star_expression ((',' star_expression))+ ','? -// | star_expression ',' -// | star_expression -static expr_ty -star_expressions_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // star_expression ((',' star_expression))+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - asdl_seq * b; - if ( - (a = star_expression_rule(p)) // star_expression - && - (b = _loop1_71_rule(p)) // ((',' star_expression))+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression ((',' star_expression))+ ','?")); - } - { // star_expression ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression ','")); - Token * _literal; - expr_ty a; - if ( - (a = star_expression_rule(p)) // star_expression - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression ','")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression ','")); - } - { // star_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression")); - expr_ty star_expression_var; - if ( - (star_expression_var = star_expression_rule(p)) // star_expression - ) - { - D(fprintf(stderr, "%*c+ star_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression")); - _res = star_expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_expression: '*' bitwise_or | expression -static expr_ty -star_expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, star_expression_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '*' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Starred ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' bitwise_or")); - } - { // expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); - expr_ty expression_var; - if ( - (expression_var = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ star_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); - _res = expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, star_expression_type, _res); - D(p->level--); - return _res; -} - -// star_named_expressions: ','.star_named_expression+ ','? -static asdl_seq* -star_named_expressions_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.star_named_expression+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_named_expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_72_rule(p)) // ','.star_named_expression+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
- ) - { - D(fprintf(stderr, "%*c+ star_named_expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_named_expression+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_named_expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_named_expression+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_named_expression: '*' bitwise_or | named_expression -static expr_ty -star_named_expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '*' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' bitwise_or")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Starred ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' bitwise_or")); - } - { // named_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); - expr_ty named_expression_var; - if ( - (named_expression_var = named_expression_rule(p)) // named_expression - ) - { - D(fprintf(stderr, "%*c+ star_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); - _res = named_expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "named_expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression -static expr_ty -named_expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME ':=' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 53)) // token=':=' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_NamedExpr ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ':=' expression")); - } - { // expression !':=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression !':='")); - expr_ty expression_var; - if ( - (expression_var = expression_rule(p)) // expression - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 53) // token=':=' - ) - { - D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression !':='")); - _res = expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression !':='")); - } - { // invalid_named_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); - void *invalid_named_expression_var; - if ( - (invalid_named_expression_var = invalid_named_expression_rule(p)) // invalid_named_expression - ) - { - D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_named_expression")); - _res = invalid_named_expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_named_expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// annotated_rhs: yield_expr | star_expressions -static expr_ty -annotated_rhs_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); - } - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> annotated_rhs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ annotated_rhs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s annotated_rhs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// expressions: expression ((',' expression))+ ','? | expression ',' | expression -static expr_ty -expressions_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // expression ((',' expression))+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - asdl_seq * b; - if ( - (a = expression_rule(p)) // expression - && - (b = _loop1_74_rule(p)) // ((',' expression))+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ((',' expression))+ ','?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ((',' expression))+ ','?")); - } - { // expression ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ','")); - Token * _literal; - expr_ty a; - if ( - (a = expression_rule(p)) // expression - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ','")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( CHECK ( _PyPegen_singleton_seq ( p , a ) ) , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ','")); - } - { // expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expressions[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); - expr_ty expression_var; - if ( - (expression_var = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ expressions[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); - _res = expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expressions[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// expression: disjunction 'if' disjunction 'else' expression | disjunction | lambdef -static expr_ty -expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, expression_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // disjunction 'if' disjunction 'else' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - expr_ty b; - expr_ty c; - if ( - (a = disjunction_rule(p)) // disjunction - && - (_keyword = _PyPegen_expect_token(p, 510)) // token='if' - && - (b = disjunction_rule(p)) // disjunction - && - (_keyword_1 = _PyPegen_expect_token(p, 516)) // token='else' - && - (c = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int 
_end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_IfExp ( b , a , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction 'if' disjunction 'else' expression")); - } - { // disjunction - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "disjunction")); - expr_ty disjunction_var; - if ( - (disjunction_var = disjunction_rule(p)) // disjunction - ) - { - D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "disjunction")); - _res = disjunction_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "disjunction")); - } - { // lambdef - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambdef")); - expr_ty lambdef_var; - if ( - (lambdef_var = lambdef_rule(p)) // lambdef - ) - { - D(fprintf(stderr, "%*c+ expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambdef")); - _res = lambdef_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambdef")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, expression_type, _res); - D(p->level--); - return _res; -} - -// lambdef: 'lambda' lambda_params? ':' expression -static expr_ty -lambdef_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'lambda' lambda_params? ':' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambdef[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); - Token * _keyword; - Token * _literal; - void *a; - expr_ty b; - if ( - (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' - && - (a = lambda_params_rule(p), 1) // lambda_params? - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ lambdef[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'lambda' lambda_params? ':' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Lambda ( ( a ) ? 
a : CHECK ( _PyPegen_empty_arguments ( p ) ) , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambdef[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'lambda' lambda_params? ':' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_params: invalid_lambda_parameters | lambda_parameters -static arguments_ty -lambda_params_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arguments_ty _res = NULL; - int _mark = p->mark; - { // invalid_lambda_parameters - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); - void *invalid_lambda_parameters_var; - if ( - (invalid_lambda_parameters_var = invalid_lambda_parameters_rule(p)) // invalid_lambda_parameters - ) - { - D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_parameters")); - _res = invalid_lambda_parameters_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_lambda_parameters")); - } - { // lambda_parameters - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_params[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); - arguments_ty lambda_parameters_var; - if ( - (lambda_parameters_var = lambda_parameters_rule(p)) // lambda_parameters - ) - { - D(fprintf(stderr, "%*c+ lambda_params[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_parameters")); - _res = lambda_parameters_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_params[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_parameters")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_parameters: -// | lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? -// | lambda_slash_with_default lambda_param_with_default* lambda_star_etc? -// | lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? -// | lambda_param_with_default+ lambda_star_etc? -// | lambda_star_etc -static arguments_ty -lambda_parameters_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arguments_ty _res = NULL; - int _mark = p->mark; - { // lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); - asdl_seq* a; - asdl_seq * b; - asdl_seq * c; - void *d; - if ( - (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default - && - (b = _loop0_75_rule(p)) // lambda_param_no_default* - && - (c = _loop0_76_rule(p)) // lambda_param_with_default* - && - (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
- ) - { - D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); - _res = _PyPegen_make_arguments ( p , a , NULL , b , c , d ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_no_default lambda_param_no_default* lambda_param_with_default* lambda_star_etc?")); - } - { // lambda_slash_with_default lambda_param_with_default* lambda_star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); - SlashWithDefault* a; - asdl_seq * b; - void *c; - if ( - (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - && - (b = _loop0_77_rule(p)) // lambda_param_with_default* - && - (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? - ) - { - D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , a , NULL , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default lambda_param_with_default* lambda_star_etc?")); - } - { // lambda_param_no_default+ lambda_param_with_default* lambda_star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); - asdl_seq * a; - asdl_seq * b; - void *c; - if ( - (a = _loop1_78_rule(p)) // lambda_param_no_default+ - && - (b = _loop0_79_rule(p)) // lambda_param_with_default* - && - (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? - ) - { - D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , a , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ lambda_param_with_default* lambda_star_etc?")); - } - { // lambda_param_with_default+ lambda_star_etc? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); - asdl_seq * a; - void *b; - if ( - (a = _loop1_80_rule(p)) // lambda_param_with_default+ - && - (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
- ) - { - D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default+ lambda_star_etc?")); - } - { // lambda_star_etc - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); - StarEtc* a; - if ( - (a = lambda_star_etc_rule(p)) // lambda_star_etc - ) - { - D(fprintf(stderr, "%*c+ lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_star_etc")); - _res = _PyPegen_make_arguments ( p , NULL , NULL , NULL , NULL , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_star_etc")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_slash_no_default: -// | lambda_param_no_default+ '/' ',' -// | lambda_param_no_default+ '/' &':' -static asdl_seq* -lambda_slash_no_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // lambda_param_no_default+ '/' ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - if ( - (a = _loop1_81_rule(p)) // lambda_param_no_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' ','")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default+ '/' ','")); - } - { // lambda_param_no_default+ '/' &':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_slash_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); - Token * _literal; - asdl_seq * a; - if ( - (a = _loop1_82_rule(p)) // lambda_param_no_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' - ) - { - D(fprintf(stderr, "%*c+ lambda_slash_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default+ '/' &':'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_slash_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default+ '/' &':'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_slash_with_default: -// | lambda_param_no_default* lambda_param_with_default+ '/' ',' -// | lambda_param_no_default* lambda_param_with_default+ '/' &':' -static SlashWithDefault* -lambda_slash_with_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - SlashWithDefault* _res = NULL; - int _mark = p->mark; - { // lambda_param_no_default* lambda_param_with_default+ '/' ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); - Token * _literal; - Token * _literal_1; - asdl_seq * a; - asdl_seq * b; - if ( - (a = _loop0_83_rule(p)) // lambda_param_no_default* - && - (b = _loop1_84_rule(p)) // lambda_param_with_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); - _res = _PyPegen_slash_with_default ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' ','")); - } - { // lambda_param_no_default* lambda_param_with_default+ '/' &':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_slash_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); - Token * _literal; - asdl_seq * a; - asdl_seq * b; - if ( - (a = _loop0_85_rule(p)) // lambda_param_no_default* - && - (b = _loop1_86_rule(p)) // lambda_param_with_default+ - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' - ) - { - D(fprintf(stderr, "%*c+ lambda_slash_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); - _res = _PyPegen_slash_with_default ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_slash_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default* lambda_param_with_default+ '/' &':'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_star_etc: -// | '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? -// | '*' ',' lambda_param_maybe_default+ lambda_kwds? -// | lambda_kwds -// | invalid_lambda_star_etc -static StarEtc* -lambda_star_etc_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - StarEtc* _res = NULL; - int _mark = p->mark; - { // '*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); - Token * _literal; - arg_ty a; - asdl_seq * b; - void *c; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = lambda_param_no_default_rule(p)) // lambda_param_no_default - && - (b = _loop0_87_rule(p)) // lambda_param_maybe_default* - && - (c = lambda_kwds_rule(p), 1) // lambda_kwds? - ) - { - D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); - _res = _PyPegen_star_etc ( p , a , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' lambda_param_no_default lambda_param_maybe_default* lambda_kwds?")); - } - { // '*' ',' lambda_param_maybe_default+ lambda_kwds? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); - Token * _literal; - Token * _literal_1; - asdl_seq * b; - void *c; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - && - (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ - && - (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
- ) - { - D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); - _res = _PyPegen_star_etc ( p , NULL , b , c ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' ',' lambda_param_maybe_default+ lambda_kwds?")); - } - { // lambda_kwds - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); - arg_ty a; - if ( - (a = lambda_kwds_rule(p)) // lambda_kwds - ) - { - D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_kwds")); - _res = _PyPegen_star_etc ( p , NULL , NULL , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_kwds")); - } - { // invalid_lambda_star_etc - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); - void *invalid_lambda_star_etc_var; - if ( - (invalid_lambda_star_etc_var = invalid_lambda_star_etc_rule(p)) // invalid_lambda_star_etc - ) - { - D(fprintf(stderr, "%*c+ lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_lambda_star_etc")); - _res = invalid_lambda_star_etc_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_lambda_star_etc")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_kwds: '**' lambda_param_no_default -static arg_ty -lambda_kwds_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - { // '**' lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_kwds[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); - Token * _literal; - arg_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (a = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - D(fprintf(stderr, "%*c+ lambda_kwds[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' lambda_param_no_default")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_kwds[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' lambda_param_no_default")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_param_no_default: lambda_param ',' | lambda_param &':' -static arg_ty -lambda_param_no_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - { // lambda_param ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); - Token * _literal; - arg_ty a; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param ','")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param ','")); - } - { // lambda_param &':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_no_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); - arg_ty a; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_no_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param &':'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_no_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param &':'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_param_with_default: lambda_param default ',' | lambda_param default &':' -static NameDefaultPair* -lambda_param_with_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - NameDefaultPair* _res = NULL; - int _mark = p->mark; - { // lambda_param default ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); - Token * _literal; - arg_ty a; - expr_ty c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p)) // default - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default ','")); - _res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param default ','")); - } - { // lambda_param default &':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_with_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); - arg_ty a; - expr_ty c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p)) // default - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_with_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default &':'")); - _res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_with_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default &':'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_param_maybe_default: lambda_param default? ',' | lambda_param default? &':' -static NameDefaultPair* -lambda_param_maybe_default_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - NameDefaultPair* _res = NULL; - int _mark = p->mark; - { // lambda_param default? ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? ','")); - Token * _literal; - arg_ty a; - void *c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p), 1) // default? - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? ','")); - _res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? ','")); - } - { // lambda_param default? &':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param_maybe_default[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); - arg_ty a; - void *c; - if ( - (a = lambda_param_rule(p)) // lambda_param - && - (c = default_rule(p), 1) // default? - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 11) // token=':' - ) - { - D(fprintf(stderr, "%*c+ lambda_param_maybe_default[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param default? &':'")); - _res = _PyPegen_name_default_pair ( p , a , c , NULL ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param_maybe_default[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param default? 
&':'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lambda_param: NAME -static arg_ty -lambda_param_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - arg_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lambda_param[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty a; - if ( - (a = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ lambda_param[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_arg ( a -> v . Name . id , NULL , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lambda_param[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// disjunction: conjunction (('or' conjunction))+ | conjunction -static expr_ty -disjunction_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, disjunction_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // conjunction (('or' conjunction))+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); - expr_ty a; - asdl_seq * b; - if ( - (a = conjunction_rule(p)) // conjunction - && - (b = _loop1_89_rule(p)) // (('or' conjunction))+ - ) - { - D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BoolOp ( Or , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "conjunction (('or' conjunction))+")); - } - { // conjunction - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> disjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "conjunction")); - expr_ty conjunction_var; - if ( - (conjunction_var = conjunction_rule(p)) // conjunction - ) - { - D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction")); - _res = conjunction_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s disjunction[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "conjunction")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, disjunction_type, _res); - D(p->level--); - return _res; -} - -// conjunction: inversion (('and' inversion))+ | inversion -static expr_ty -conjunction_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, conjunction_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // inversion (('and' inversion))+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); - expr_ty a; - asdl_seq * b; - if ( - (a = inversion_rule(p)) // inversion - && - (b = _loop1_90_rule(p)) // (('and' inversion))+ - ) - { - D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BoolOp ( And , CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "inversion (('and' inversion))+")); - } - { // inversion - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> conjunction[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "inversion")); - expr_ty inversion_var; - if ( - (inversion_var = inversion_rule(p)) // inversion - ) - { - D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion")); - _res = inversion_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s conjunction[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "inversion")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, conjunction_type, _res); - D(p->level--); - return _res; -} - -// inversion: 'not' inversion | comparison -static expr_ty -inversion_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, inversion_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'not' inversion - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' inversion")); - Token * _keyword; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='not' - && - (a = inversion_rule(p)) // inversion - ) - { - D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' inversion")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_UnaryOp ( Not , a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'not' inversion")); - } - { // comparison - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> inversion[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "comparison")); - expr_ty comparison_var; - if ( - (comparison_var = comparison_rule(p)) // comparison - ) - { - D(fprintf(stderr, "%*c+ inversion[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "comparison")); - _res = comparison_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s inversion[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "comparison")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, inversion_type, _res); - D(p->level--); - return _res; -} - -// comparison: bitwise_or compare_op_bitwise_or_pair+ | bitwise_or -static expr_ty -comparison_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // bitwise_or compare_op_bitwise_or_pair+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); - expr_ty a; - asdl_seq * b; - if ( - (a = bitwise_or_rule(p)) // bitwise_or - && - (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ - ) - { - D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Compare ( a , CHECK ( _PyPegen_get_cmpops ( p , b ) ) , CHECK ( _PyPegen_get_exprs ( p , b ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); - } - { // bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> comparison[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or")); - expr_ty bitwise_or_var; - if ( - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or")); - _res = bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s comparison[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// compare_op_bitwise_or_pair: -// | eq_bitwise_or -// | noteq_bitwise_or -// | lte_bitwise_or -// | lt_bitwise_or -// | gte_bitwise_or -// | gt_bitwise_or -// | notin_bitwise_or -// | in_bitwise_or -// | isnot_bitwise_or -// | is_bitwise_or -static CmpopExprPair* -compare_op_bitwise_or_pair_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // eq_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); - CmpopExprPair* eq_bitwise_or_var; - if ( - (eq_bitwise_or_var = eq_bitwise_or_rule(p)) // eq_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "eq_bitwise_or")); - _res = eq_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "eq_bitwise_or")); - } - { // noteq_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); - CmpopExprPair* noteq_bitwise_or_var; - if ( - (noteq_bitwise_or_var = noteq_bitwise_or_rule(p)) // noteq_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "noteq_bitwise_or")); - _res = noteq_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "noteq_bitwise_or")); - } - { // lte_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); - CmpopExprPair* lte_bitwise_or_var; - if ( - (lte_bitwise_or_var = lte_bitwise_or_rule(p)) // lte_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lte_bitwise_or")); - _res = lte_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lte_bitwise_or")); - } - { // lt_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); - CmpopExprPair* lt_bitwise_or_var; - if ( - (lt_bitwise_or_var = lt_bitwise_or_rule(p)) // lt_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lt_bitwise_or")); - _res = lt_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lt_bitwise_or")); - } - { // gte_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); - CmpopExprPair* gte_bitwise_or_var; - if ( - (gte_bitwise_or_var = gte_bitwise_or_rule(p)) // gte_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gte_bitwise_or")); - _res = gte_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gte_bitwise_or")); - } - { // gt_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); - CmpopExprPair* gt_bitwise_or_var; - if ( - (gt_bitwise_or_var = gt_bitwise_or_rule(p)) // gt_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "gt_bitwise_or")); - _res = gt_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "gt_bitwise_or")); - } - { // notin_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); - CmpopExprPair* notin_bitwise_or_var; - if ( - (notin_bitwise_or_var = notin_bitwise_or_rule(p)) // notin_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "notin_bitwise_or")); - _res = notin_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "notin_bitwise_or")); - } - { // in_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); - CmpopExprPair* in_bitwise_or_var; - if ( - (in_bitwise_or_var = in_bitwise_or_rule(p)) // in_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "in_bitwise_or")); - _res = in_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "in_bitwise_or")); - } - { // isnot_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); - CmpopExprPair* isnot_bitwise_or_var; - if ( - (isnot_bitwise_or_var = isnot_bitwise_or_rule(p)) // isnot_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "isnot_bitwise_or")); - _res = isnot_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "isnot_bitwise_or")); - } - { // is_bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> compare_op_bitwise_or_pair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); - CmpopExprPair* is_bitwise_or_var; - if ( - (is_bitwise_or_var = is_bitwise_or_rule(p)) // is_bitwise_or - ) - { - D(fprintf(stderr, "%*c+ compare_op_bitwise_or_pair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "is_bitwise_or")); - _res = is_bitwise_or_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s compare_op_bitwise_or_pair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "is_bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// eq_bitwise_or: '==' bitwise_or -static CmpopExprPair* -eq_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // '==' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> eq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 27)) // token='==' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ eq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'==' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , Eq , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s eq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'==' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// noteq_bitwise_or: ('!=') bitwise_or -static CmpopExprPair* -noteq_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // ('!=') bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); - void *_tmp_92_var; - expr_ty a; - if ( - (_tmp_92_var = _tmp_92_rule(p)) // '!=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ noteq_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , NotEq , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s noteq_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('!=') bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lte_bitwise_or: '<=' bitwise_or -static CmpopExprPair* -lte_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // '<=' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 29)) // token='<=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ lte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<=' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , LtE , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<=' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// lt_bitwise_or: '<' bitwise_or -static CmpopExprPair* -lt_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // '<' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> lt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 20)) // token='<' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ lt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'<' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , Lt , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s lt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'<' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// gte_bitwise_or: '>=' bitwise_or -static CmpopExprPair* -gte_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // '>=' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> gte_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 30)) // token='>=' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ gte_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>=' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , GtE , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s gte_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'>=' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// gt_bitwise_or: '>' bitwise_or -static CmpopExprPair* -gt_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // '>' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> gt_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 21)) // token='>' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ gt_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'>' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , Gt , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s gt_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'>' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// notin_bitwise_or: 'not' 'in' bitwise_or -static CmpopExprPair* -notin_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // 'not' 'in' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> notin_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='not' - && - (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ notin_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'not' 'in' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , NotIn , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s notin_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'not' 'in' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// in_bitwise_or: 'in' bitwise_or -static CmpopExprPair* -in_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // 'in' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> in_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); - Token * _keyword; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 518)) // token='in' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ in_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'in' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , In , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s in_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'in' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// isnot_bitwise_or: 'is' 'not' bitwise_or -static CmpopExprPair* -isnot_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // 'is' 'not' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> isnot_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 526)) // token='is' - && - (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ isnot_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' 'not' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , IsNot , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s isnot_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'is' 'not' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// is_bitwise_or: 'is' bitwise_or -static CmpopExprPair* -is_bitwise_or_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - CmpopExprPair* _res = NULL; - int _mark = p->mark; - { // 'is' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> is_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); - Token * _keyword; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 526)) // token='is' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ is_bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'is' bitwise_or")); - _res = _PyPegen_cmpop_expr_pair ( p , Is , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s is_bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'is' bitwise_or")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// bitwise_or: bitwise_or '|' bitwise_xor | bitwise_xor -static expr_ty bitwise_or_raw(Parser *); -static expr_ty -bitwise_or_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_or_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_1 = _PyPegen_update_memo(p, _mark, bitwise_or_type, _res); - if (tmpvar_1) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = bitwise_or_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -bitwise_or_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // bitwise_or '|' bitwise_xor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = bitwise_or_rule(p)) // bitwise_or - && - (_literal = _PyPegen_expect_token(p, 18)) // token='|' - && - (b = bitwise_xor_rule(p)) // bitwise_xor - ) - { - D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or '|' bitwise_xor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , BitOr , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_or '|' bitwise_xor")); - } - { // bitwise_xor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); - expr_ty bitwise_xor_var; - if ( - (bitwise_xor_var = bitwise_xor_rule(p)) // bitwise_xor - ) - { - D(fprintf(stderr, "%*c+ bitwise_or[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor")); - _res = bitwise_xor_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_or[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_xor")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// bitwise_xor: bitwise_xor '^' bitwise_and | bitwise_and -static expr_ty bitwise_xor_raw(Parser *); -static expr_ty -bitwise_xor_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_xor_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_2 = _PyPegen_update_memo(p, _mark, bitwise_xor_type, _res); - if (tmpvar_2) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = bitwise_xor_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -bitwise_xor_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // bitwise_xor '^' bitwise_and - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' bitwise_and")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = bitwise_xor_rule(p)) // bitwise_xor - && - (_literal = _PyPegen_expect_token(p, 32)) // token='^' - && - (b = bitwise_and_rule(p)) // bitwise_and - ) - { - D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_xor '^' bitwise_and")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , BitXor , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_xor '^' bitwise_and")); - } - { // bitwise_and - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_xor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and")); - expr_ty bitwise_and_var; - if ( - (bitwise_and_var = bitwise_and_rule(p)) // bitwise_and - ) - { - D(fprintf(stderr, "%*c+ bitwise_xor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and")); - _res = bitwise_and_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_xor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "bitwise_and")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// bitwise_and: bitwise_and '&' shift_expr | shift_expr -static expr_ty bitwise_and_raw(Parser *); -static expr_ty -bitwise_and_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, bitwise_and_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_3 = _PyPegen_update_memo(p, _mark, bitwise_and_type, _res); - if (tmpvar_3) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = bitwise_and_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -bitwise_and_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // bitwise_and '&' shift_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = bitwise_and_rule(p)) // bitwise_and - && - (_literal = _PyPegen_expect_token(p, 19)) // token='&' - && - (b = shift_expr_rule(p)) // shift_expr - ) - { - D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_and '&' shift_expr")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , BitAnd , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "bitwise_and '&' shift_expr")); - } - { // shift_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> bitwise_and[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr")); - expr_ty shift_expr_var; - if ( - (shift_expr_var = shift_expr_rule(p)) // shift_expr - ) - { - D(fprintf(stderr, "%*c+ bitwise_and[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr")); - _res = shift_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s bitwise_and[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "shift_expr")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// shift_expr: shift_expr '<<' sum | shift_expr '>>' sum | sum -static expr_ty shift_expr_raw(Parser *); -static expr_ty -shift_expr_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, shift_expr_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_4 = _PyPegen_update_memo(p, _mark, shift_expr_type, _res); - if (tmpvar_4) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = shift_expr_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -shift_expr_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // shift_expr '<<' sum - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = shift_expr_rule(p)) // shift_expr - && - (_literal = _PyPegen_expect_token(p, 33)) // token='<<' - && - (b = sum_rule(p)) // sum - ) - { - D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '<<' sum")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , LShift , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "shift_expr '<<' sum")); - } - { // shift_expr '>>' sum - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = shift_expr_rule(p)) // shift_expr - && - (_literal = _PyPegen_expect_token(p, 34)) // token='>>' - && - (b = sum_rule(p)) // sum - ) - { - D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "shift_expr '>>' sum")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , RShift , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "shift_expr '>>' sum")); - } - { // sum - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> shift_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum")); - expr_ty sum_var; - if ( - (sum_var = sum_rule(p)) // sum - ) - { - D(fprintf(stderr, "%*c+ shift_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum")); - _res = sum_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s shift_expr[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// sum: sum '+' term | sum '-' term | term -static expr_ty sum_raw(Parser *); -static expr_ty -sum_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, sum_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_5 = _PyPegen_update_memo(p, _mark, sum_type, _res); - if (tmpvar_5) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = sum_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -sum_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // sum '+' term - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '+' term")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = sum_rule(p)) // sum - && - (_literal = _PyPegen_expect_token(p, 14)) // token='+' - && - (b = term_rule(p)) // term - ) - { - D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '+' term")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } 
- int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Add , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '+' term")); - } - { // sum '-' term - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "sum '-' term")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = sum_rule(p)) // sum - && - (_literal = _PyPegen_expect_token(p, 15)) // token='-' - && - (b = term_rule(p)) // term - ) - { - D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "sum '-' term")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Sub , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "sum '-' term")); - } - { // term - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> sum[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term")); - expr_ty term_var; - if ( - (term_var = term_rule(p)) // term - ) - { - D(fprintf(stderr, "%*c+ sum[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term")); - _res = term_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s sum[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// Left-recursive -// term: -// | term '*' factor -// | term '/' factor -// | term '//' factor -// | term '%' factor -// | term '@' factor -// | factor -static expr_ty term_raw(Parser *); -static expr_ty -term_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, term_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_6 = _PyPegen_update_memo(p, _mark, term_type, _res); - if (tmpvar_6) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = term_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -term_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // term '*' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '*' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = term_rule(p)) // term - && - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '*' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Mult , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '*' factor")); - } - { // term '/' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '/' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = term_rule(p)) // term - && - (_literal = _PyPegen_expect_token(p, 17)) // token='/' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '/' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Div , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '/' factor")); - } - { // term '//' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '//' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = term_rule(p)) // term - && - (_literal = _PyPegen_expect_token(p, 47)) // token='//' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '//' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , FloorDiv , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '//' factor")); - } - { // term '%' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '%' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = term_rule(p)) // term - && - (_literal = _PyPegen_expect_token(p, 24)) // token='%' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '%' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Mod , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "term '%' factor")); - } - { // term '@' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "term '@' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = term_rule(p)) // term - && - (_literal = _PyPegen_expect_token(p, 49)) // token='@' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "term '@' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "The '@' operator is" , _Py_BinOp ( a , MatMult , b , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "term '@' factor")); - } - { // factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> term[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "factor")); - expr_ty factor_var; - if ( - (factor_var = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ term[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "factor")); - _res = factor_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s term[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "factor")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// factor: '+' factor | '-' factor | '~' factor | power -static expr_ty -factor_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, factor_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '+' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'+' factor")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 14)) // token='+' - && - (a = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'+' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_UnaryOp ( UAdd , a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'+' factor")); - } - { // '-' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'-' factor")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 15)) // token='-' - && - (a = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'-' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_UnaryOp ( USub , a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'-' factor")); - } - { // '~' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'~' factor")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 31)) // token='~' - && - (a = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'~' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_UnaryOp ( Invert , a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'~' factor")); - } - { // power - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> factor[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "power")); - expr_ty power_var; - if ( - (power_var = power_rule(p)) // power - ) - { - D(fprintf(stderr, "%*c+ factor[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "power")); - _res = power_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s factor[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "power")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, factor_type, _res); - D(p->level--); - return _res; -} - -// power: await_primary '**' factor | await_primary -static expr_ty -power_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // await_primary '**' factor - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = await_primary_rule(p)) // await_primary - && - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (b = factor_rule(p)) // factor - ) - { - D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary '**' factor")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_BinOp ( a , Pow , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "await_primary '**' factor")); - } - { // await_primary - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> power[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "await_primary")); - expr_ty await_primary_var; - if ( - (await_primary_var = await_primary_rule(p)) // await_primary - ) - { - D(fprintf(stderr, "%*c+ power[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "await_primary")); - _res = await_primary_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s power[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "await_primary")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// await_primary: AWAIT primary | primary -static expr_ty -await_primary_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, await_primary_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // AWAIT primary - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); - expr_ty a; - Token * await_var; - if ( - (await_var = _PyPegen_expect_token(p, AWAIT)) // token='AWAIT' - && - (a = primary_rule(p)) // primary - ) - { - D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "AWAIT primary")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = CHECK_VERSION ( 5 , "Await expressions are" , _Py_Await ( a , EXTRA ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "AWAIT primary")); - } - { // primary - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> await_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary")); - expr_ty primary_var; - if ( - (primary_var = primary_rule(p)) // primary - ) - { - D(fprintf(stderr, "%*c+ await_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary")); - _res = primary_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s await_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, await_primary_type, _res); - D(p->level--); - return _res; -} - -// Left-recursive -// primary: -// | primary '.' NAME -// | primary genexp -// | primary '(' arguments? 
')' -// | primary '[' slices ']' -// | atom -static expr_ty primary_raw(Parser *); -static expr_ty -primary_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, primary_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_7 = _PyPegen_update_memo(p, _mark, primary_type, _res); - if (tmpvar_7) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = primary_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -primary_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // primary '.' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '.' NAME")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '.' NAME")); - } - { // primary genexp - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary genexp")); - expr_ty a; - expr_ty b; - if ( - (a = primary_rule(p)) // primary - && - (b = genexp_rule(p)) // genexp - ) - { - D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary genexp")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary genexp")); - } - { // primary '(' arguments? 
')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - void *b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (b = arguments_rule(p), 1) // arguments? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '(' arguments? ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '(' arguments? ')'")); - } - { // primary '[' slices ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = primary_rule(p)) // primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "primary '[' slices ']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "primary '[' slices ']'")); - } - { // atom - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom")); - expr_ty atom_var; - if ( - (atom_var = atom_rule(p)) // atom - ) - { - D(fprintf(stderr, "%*c+ primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom")); - _res = atom_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "atom")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// slices: slice !',' | ','.slice+ ','? 
-static expr_ty -slices_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // slice !',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice !','")); - expr_ty a; - if ( - (a = slice_rule(p)) // slice - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' - ) - { - D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice !','")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice !','")); - } - { // ','.slice+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slices[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_93_rule(p)) // ','.slice+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ slices[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.slice+ ','?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slices[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.slice+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// slice: expression? ':' expression? [':' expression?] | expression -static expr_ty -slice_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // expression? ':' expression? [':' expression?] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]")); - Token * _literal; - void *a; - void *b; - void *c; - if ( - (a = expression_rule(p), 1) // expression? - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = expression_rule(p), 1) // expression? - && - (c = _tmp_95_rule(p), 1) // [':' expression?] 
- ) - { - D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? [':' expression?]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Slice ( a , b , c , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression? ':' expression? [':' expression?]")); - } - { // expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> slice[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); - expr_ty a; - if ( - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s slice[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// atom: -// | NAME -// | 'True' -// | 'False' -// | 'None' -// | &STRING strings -// | NUMBER -// | &'(' (tuple | group | genexp) -// | &'[' (list | listcomp) -// | &'{' (dict | set | dictcomp | setcomp) -// | '...' -static expr_ty -atom_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty name_var; - if ( - (name_var = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - _res = name_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); - } - { // 'True' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 527)) // token='True' - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Constant ( Py_True , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'True'")); - } - { // 'False' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 528)) // token='False' - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Constant ( Py_False , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'False'")); - } - { // 'None' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 529)) // token='None' - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Constant ( Py_None , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'None'")); - } - { // &STRING strings - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&STRING strings")); - expr_ty strings_var; - if ( - _PyPegen_lookahead(1, _PyPegen_string_token, p) - && - (strings_var = strings_rule(p)) // strings - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&STRING strings")); - _res = strings_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&STRING strings")); - } - { // NUMBER - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NUMBER")); - expr_ty number_var; - if ( - (number_var = _PyPegen_number_token(p)) // NUMBER - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NUMBER")); - _res = number_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NUMBER")); - } - { // &'(' (tuple | group | genexp) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - void *_tmp_96_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' - && - (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - _res = _tmp_96_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'(' (tuple | group | genexp)")); - } - { // &'[' (list | listcomp) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - void *_tmp_97_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' - && - (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - _res = _tmp_97_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'[' (list | listcomp)")); - } - { // &'{' (dict | set | dictcomp | setcomp) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - void *_tmp_98_var; - if ( - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' - && - (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - _res = _tmp_98_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - } - { // '...' 
- if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 52)) // token='...' - ) - { - D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Constant ( Py_Ellipsis , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// strings: STRING+ -static expr_ty -strings_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, strings_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - { // STRING+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+")); - asdl_seq * a; - if ( - (a = _loop1_99_rule(p)) // STRING+ - ) - { - D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+")); - _res = _PyPegen_concatenate_strings ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s strings[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING+")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, strings_type, _res); - D(p->level--); - return _res; -} - -// list: '[' star_named_expressions? ']' -static expr_ty -list_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '[' star_named_expressions? ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> list[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? ']'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (a = star_named_expressions_rule(p), 1) // star_named_expressions? - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ list[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_named_expressions? 
']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_List ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s list[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_named_expressions? ']'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension -static expr_ty -listcomp_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '[' named_expression for_if_clauses ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - asdl_seq* b; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (a = named_expression_rule(p)) // named_expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_ListComp ( a , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' named_expression for_if_clauses ']'")); - } - { // invalid_comprehension - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - void *invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - _res = invalid_comprehension_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// tuple: '(' [star_named_expression ',' star_named_expressions?] 
')' -static expr_ty -tuple_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '(' [star_named_expression ',' star_named_expressions?] ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> tuple[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ tuple[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s tuple[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' [star_named_expression ',' star_named_expressions?] ')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// group: '(' (yield_expr | named_expression) ')' -static expr_ty -group_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // '(' (yield_expr | named_expression) ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> group[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = _tmp_101_rule(p)) // yield_expr | named_expression - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ group[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s group[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// genexp: '(' expression for_if_clauses ')' | invalid_comprehension -static expr_ty -genexp_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '(' expression for_if_clauses ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - asdl_seq* b; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = expression_rule(p)) // expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_GeneratorExp ( a , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' expression for_if_clauses ')'")); - } - { // invalid_comprehension - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - void *invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - _res = invalid_comprehension_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_comprehension")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// set: '{' expressions_list '}' -static expr_ty -set_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' expressions_list '}' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> set[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); - Token * _literal; - Token * _literal_1; - asdl_seq* a; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - && - (a = expressions_list_rule(p)) // expressions_list - && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' - ) - { - D(fprintf(stderr, "%*c+ set[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expressions_list '}'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Set ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s set[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' expressions_list '}'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// setcomp: '{' expression for_if_clauses '}' | invalid_comprehension -static expr_ty -setcomp_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' expression for_if_clauses '}' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - asdl_seq* b; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - && - (a = expression_rule(p)) // expression - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' - ) - { - D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_SetComp ( a , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' expression for_if_clauses '}'")); - } - { // invalid_comprehension - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - void *invalid_comprehension_var; - if ( - (invalid_comprehension_var = invalid_comprehension_rule(p)) // invalid_comprehension - ) - { - D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_comprehension")); - _res = invalid_comprehension_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_comprehension")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// dict: '{' double_starred_kvpairs? '}' -static expr_ty -dict_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' double_starred_kvpairs? '}' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dict[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? 
'}'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - && - (a = double_starred_kvpairs_rule(p), 1) // double_starred_kvpairs? - && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' - ) - { - D(fprintf(stderr, "%*c+ dict[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' double_starred_kvpairs? '}'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Dict ( CHECK ( _PyPegen_get_keys ( p , a ) ) , CHECK ( _PyPegen_get_values ( p , a ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dict[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' double_starred_kvpairs? '}'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// dictcomp: '{' kvpair for_if_clauses '}' | invalid_dict_comprehension -static expr_ty -dictcomp_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' kvpair for_if_clauses '}' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); - Token * _literal; - Token * _literal_1; - KeyValuePair* a; - asdl_seq* b; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - && - (a = kvpair_rule(p)) // kvpair - && - (b = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' - ) - { - D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' kvpair for_if_clauses '}'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_DictComp ( a -> key , a -> value , b , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' kvpair for_if_clauses '}'")); - } - { // invalid_dict_comprehension - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> dictcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); - void *invalid_dict_comprehension_var; - if ( - (invalid_dict_comprehension_var = invalid_dict_comprehension_rule(p)) // invalid_dict_comprehension - ) - { - D(fprintf(stderr, "%*c+ dictcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_dict_comprehension")); - _res = invalid_dict_comprehension_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s dictcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_dict_comprehension")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// double_starred_kvpairs: ','.double_starred_kvpair+ ','? -static asdl_seq* -double_starred_kvpairs_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.double_starred_kvpair+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> double_starred_kvpairs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_102_rule(p)) // ','.double_starred_kvpair+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ double_starred_kvpairs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.double_starred_kvpair+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s double_starred_kvpairs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.double_starred_kvpair+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// double_starred_kvpair: '**' bitwise_or | kvpair -static KeyValuePair* -double_starred_kvpair_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - KeyValuePair* _res = NULL; - int _mark = p->mark; - { // '**' bitwise_or - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (a = bitwise_or_rule(p)) // bitwise_or - ) - { - D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' bitwise_or")); - _res = _PyPegen_key_value_pair ( p , NULL , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**' bitwise_or")); - } - { // kvpair - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> double_starred_kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kvpair")); - KeyValuePair* kvpair_var; - if ( - (kvpair_var = kvpair_rule(p)) // kvpair - ) - { - D(fprintf(stderr, "%*c+ double_starred_kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kvpair")); - _res = kvpair_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s double_starred_kvpair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kvpair")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// kvpair: expression ':' expression -static KeyValuePair* -kvpair_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - KeyValuePair* _res = NULL; - int _mark = p->mark; - { // expression ':' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kvpair[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = expression_rule(p)) // expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ kvpair[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); - _res = _PyPegen_key_value_pair ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kvpair[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// for_if_clauses: for_if_clause+ -static asdl_seq* -for_if_clauses_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // for_if_clause+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); - asdl_seq * _loop1_104_var; - if ( - (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ - ) - { - D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); - _res = _loop1_104_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s for_if_clauses[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// for_if_clause: -// | ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* -// | 'for' star_targets 'in' disjunction (('if' disjunction))* -static comprehension_ty -for_if_clause_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - comprehension_ty _res = NULL; - int _mark = p->mark; - { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - Token * async_var; - expr_ty b; - asdl_seq * c; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - && - (_keyword = _PyPegen_expect_token(p, 517)) // token='for' - && - (a = star_targets_rule(p)) // star_targets - && - (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' - && - (b = disjunction_rule(p)) // disjunction - && - (c = _loop0_105_rule(p)) // (('if' disjunction))* - ) - { - D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); - _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); - } - { // 'for' star_targets 'in' disjunction (('if' disjunction))* - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - expr_ty b; - asdl_seq * c; - if ( - (_keyword = _PyPegen_expect_token(p, 517)) // token='for' - && - (a = star_targets_rule(p)) // star_targets - && - (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' - && - (b = disjunction_rule(p)) // disjunction - && - (c = _loop0_106_rule(p)) // (('if' disjunction))* - ) - { - D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); - _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// yield_expr: 'yield' 'from' expression | 'yield' star_expressions? 
-static expr_ty -yield_expr_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'yield' 'from' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); - Token * _keyword; - Token * _keyword_1; - expr_ty a; - if ( - (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' - && - (_keyword_1 = _PyPegen_expect_token(p, 514)) // token='from' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' 'from' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_YieldFrom ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'yield' 'from' expression")); - } - { // 'yield' star_expressions? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> yield_expr[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); - Token * _keyword; - void *a; - if ( - (_keyword = _PyPegen_expect_token(p, 504)) // token='yield' - && - (a = star_expressions_rule(p), 1) // star_expressions? - ) - { - D(fprintf(stderr, "%*c+ yield_expr[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'yield' star_expressions?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Yield ( a , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s yield_expr[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'yield' star_expressions?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// arguments: args ','? &')' | incorrect_arguments -static expr_ty -arguments_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, arguments_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - { // args ','? &')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ','? 
&')'")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - if ( - (a = args_rule(p)) // args - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - && - _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 8) // token=')' - ) - { - D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ','? &')'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ','? &')'")); - } - { // incorrect_arguments - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); - void *incorrect_arguments_var; - if ( - (incorrect_arguments_var = incorrect_arguments_rule(p)) // incorrect_arguments - ) - { - D(fprintf(stderr, "%*c+ arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "incorrect_arguments")); - _res = incorrect_arguments_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "incorrect_arguments")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, arguments_type, _res); - D(p->level--); - return _res; -} - -// args: starred_expression [',' args] | kwargs | named_expression [',' args] -static expr_ty -args_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // starred_expression [',' args] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); - expr_ty a; - void *b; - if ( - (a = starred_expression_rule(p)) // starred_expression - && - (b = _tmp_107_rule(p), 1) // [',' args] - ) - { - D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "starred_expression [',' args]")); - } - { // kwargs - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwargs")); - asdl_seq* a; - if ( - (a = kwargs_rule(p)) // kwargs - ) - { - D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwargs")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( _PyPegen_dummy_name ( p ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_extract_starred_exprs ( p , a ) ) , CHECK_NULL_ALLOWED ( _PyPegen_seq_delete_starred_exprs ( p , a ) ) , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwargs")); - } - { // named_expression [',' args] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> args[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); - expr_ty a; - void *b; - if ( - (a = named_expression_rule(p)) // named_expression - && - (b = _tmp_108_rule(p), 1) // [',' args] - ) - { - D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( _PyPegen_dummy_name ( p ) , ( b ) ? CHECK ( _PyPegen_seq_insert_in_front ( p , a , ( ( expr_ty ) b ) -> v . Call . args ) ) : CHECK ( _PyPegen_singleton_seq ( p , a ) ) , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s args[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "named_expression [',' args]")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// kwargs: -// | ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ -// | ','.kwarg_or_starred+ -// | ','.kwarg_or_double_starred+ -static asdl_seq* -kwargs_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); - Token * _literal; - asdl_seq * a; - asdl_seq * b; - if ( - (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ - ) - { - D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); - _res = _PyPegen_join_sequences ( p , a , b ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); - } - { // ','.kwarg_or_starred+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - asdl_seq * _gather_113_var; - if ( - (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ - ) - { - D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - _res = _gather_113_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.kwarg_or_starred+")); - } - { // ','.kwarg_or_double_starred+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - asdl_seq * _gather_115_var; - if ( - (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ - ) - { - D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - _res = _gather_115_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwargs[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','.kwarg_or_double_starred+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// starred_expression: '*' expression -static expr_ty -starred_expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '*' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> starred_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ starred_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Starred ( a , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s starred_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// kwarg_or_starred: NAME '=' expression | starred_expression | invalid_kwarg -static KeywordOrStarred* -kwarg_or_starred_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - KeywordOrStarred* _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME '=' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression")); - } - { // starred_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "starred_expression")); - expr_ty a; - if ( - (a = starred_expression_rule(p)) // starred_expression - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression")); - _res = _PyPegen_keyword_or_starred ( p , a , 0 ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "starred_expression")); - } - { // invalid_kwarg - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); - void *invalid_kwarg_var; - if ( - (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); - _res = invalid_kwarg_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_kwarg")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// kwarg_or_double_starred: NAME '=' expression | '**' expression | invalid_kwarg -static KeywordOrStarred* -kwarg_or_double_starred_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - KeywordOrStarred* _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME '=' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (b = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME '=' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( a -> v . Name . 
id , b , EXTRA ) ) , 1 ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME '=' expression")); - } - { // '**' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**' expression")); - Token * _literal; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - && - (a = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**' expression")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _PyPegen_keyword_or_starred ( p , CHECK ( _Py_keyword ( NULL , a , EXTRA ) ) , 1 ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**' expression")); - } - { // invalid_kwarg - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> kwarg_or_double_starred[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); - void *invalid_kwarg_var; - if ( - (invalid_kwarg_var = invalid_kwarg_rule(p)) // invalid_kwarg - ) - { - D(fprintf(stderr, "%*c+ kwarg_or_double_starred[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_kwarg")); - _res = invalid_kwarg_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s kwarg_or_double_starred[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_kwarg")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_targets: star_target !',' | star_target ((',' star_target))* ','? 
-static expr_ty -star_targets_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // star_target !',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target !','")); - expr_ty a; - if ( - (a = star_target_rule(p)) // star_target - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 12) // token=',' - ) - { - D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target !','")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target !','")); - } - { // star_target ((',' star_target))* ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - asdl_seq * b; - if ( - (a = star_target_rule(p)) // star_target - && - (b = _loop0_117_rule(p)) // ((',' star_target))* - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ star_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target ((',' star_target))* ','?")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( CHECK ( _PyPegen_seq_insert_in_front ( p , a , b ) ) , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target ((',' star_target))* ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_targets_seq: ','.star_target+ ','? -static asdl_seq* -star_targets_seq_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.star_target+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_targets_seq[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_118_rule(p)) // ','.star_target+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
- ) - { - D(fprintf(stderr, "%*c+ star_targets_seq[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.star_target+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_targets_seq[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.star_target+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// star_target: -// | '*' (!'*' star_target) -// | t_primary '.' NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead -// | star_atom -static expr_ty -star_target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, star_target_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '*' (!'*' star_target) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); - Token * _literal; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (a = _tmp_120_rule(p)) // !'*' star_target - ) - { - D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Starred ( CHECK ( _PyPegen_set_expr_context ( p , a , Store ) ) , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (!'*' star_target)")); - } - { // t_primary '.' NAME !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . 
id , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - } - { // t_primary '[' slices ']' !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - } - { // star_atom - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_atom")); - expr_ty star_atom_var; - if ( - (star_atom_var = star_atom_rule(p)) // star_atom - ) - { - D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_atom")); - _res = star_atom_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_atom")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, star_target_type, _res); - D(p->level--); - return _res; -} - -// star_atom: -// | NAME -// | '(' star_target ')' -// | '(' star_targets_seq? ')' -// | '[' star_targets_seq? 
']' -static expr_ty -star_atom_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty a; - if ( - (a = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - _res = _PyPegen_set_expr_context ( p , a , Store ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); - } - { // '(' star_target ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = star_target_rule(p)) // star_target - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_target ')'")); - _res = _PyPegen_set_expr_context ( p , a , Store ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_target ')'")); - } - { // '(' star_targets_seq? ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = star_targets_seq_rule(p), 1) // star_targets_seq? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' star_targets_seq? ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( a , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' star_targets_seq? ')'")); - } - { // '[' star_targets_seq? ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> star_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? 
']'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (a = star_targets_seq_rule(p), 1) // star_targets_seq? - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ star_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' star_targets_seq? ']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_List ( a , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s star_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' star_targets_seq? ']'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// single_target: single_subscript_attribute_target | NAME | '(' single_target ')' -static expr_ty -single_target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - { // single_subscript_attribute_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); - expr_ty single_subscript_attribute_target_var; - if ( - (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target - ) - { - D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); - _res = single_subscript_attribute_target_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); - } - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty a; - if ( - (a = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - _res = _PyPegen_set_expr_context ( p , a , Store ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME")); - } - { // '(' single_target ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> single_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = single_target_rule(p)) // single_target - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ single_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s single_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// single_subscript_attribute_target: -// | t_primary '.' NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead -static expr_ty -single_subscript_attribute_target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' 
NAME !t_lookahead")); - } - { // t_primary '[' slices ']' !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> single_subscript_attribute_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ single_subscript_attribute_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s single_subscript_attribute_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// del_targets: ','.del_target+ ','? -static asdl_seq* -del_targets_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.del_target+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_121_rule(p)) // ','.del_target+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ del_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.del_target+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.del_target+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// del_target: -// | t_primary '.' NAME &del_target_end -// | t_primary '[' slices ']' &del_target_end -// | del_t_atom -static expr_ty -del_target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, del_target_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' 
NAME &del_target_end - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(1, del_target_end_rule, p) - ) - { - D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . id , Del , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME &del_target_end")); - } - { // t_primary '[' slices ']' &del_target_end - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - && - _PyPegen_lookahead(1, del_target_end_rule, p) - ) - { - D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Del , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); - } - { // del_t_atom - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_t_atom")); - expr_ty del_t_atom_var; - if ( - (del_t_atom_var = del_t_atom_rule(p)) // del_t_atom - ) - { - D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_t_atom")); - _res = del_t_atom_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_t_atom")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, del_target_type, _res); - D(p->level--); - return _res; -} - -// del_t_atom: -// | NAME &del_target_end -// | '(' del_target ')' -// | '(' del_targets? ')' -// | '[' del_targets? 
']' -// | invalid_del_target -static expr_ty -del_t_atom_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME &del_target_end - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); - expr_ty a; - if ( - (a = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(1, del_target_end_rule, p) - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); - _res = _PyPegen_set_expr_context ( p , a , Del ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME &del_target_end")); - } - { // '(' del_target ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = del_target_rule(p)) // del_target - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_target ')'")); - _res = _PyPegen_set_expr_context ( p , a , Del ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_target ')'")); - } - { // '(' del_targets? ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = del_targets_rule(p), 1) // del_targets? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' del_targets? ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( a , Del , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' del_targets? ')'")); - } - { // '[' del_targets? 
']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); - Token * _literal; - Token * _literal_1; - void *a; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (a = del_targets_rule(p), 1) // del_targets? - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' del_targets? ']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_List ( a , Del , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' del_targets? ']'")); - } - { // invalid_del_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); - void *invalid_del_target_var; - if ( - (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); - _res = invalid_del_target_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_del_target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// del_target_end: ')' | ']' | ',' | ';' | NEWLINE -static void * -del_target_end_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); - } - { // ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "']'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "']'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "']'")); - } - { // ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); - } - { // ';' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 13)) // token=';' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); - } - { // NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - _res = newline_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// targets: ','.target+ ','? -static asdl_seq* -targets_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq* _res = NULL; - int _mark = p->mark; - { // ','.target+ ','? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - asdl_seq * a; - if ( - (a = _gather_123_rule(p)) // ','.target+ - && - (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? - ) - { - D(fprintf(stderr, "%*c+ targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.target+ ','?")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','.target+ ','?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// target: -// | t_primary '.' 
NAME !t_lookahead -// | t_primary '[' slices ']' !t_lookahead -// | t_atom -static expr_ty -target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, target_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . id , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); - } - { // t_primary '[' slices ']' !t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - && - _PyPegen_lookahead(0, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); - } - { // t_atom - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_atom")); - expr_ty t_atom_var; - if ( - (t_atom_var = t_atom_rule(p)) // t_atom - ) - { - D(fprintf(stderr, "%*c+ target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_atom")); - _res = t_atom_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_atom")); - } - _res = NULL; - done: - _PyPegen_insert_memo(p, _mark, target_type, _res); - D(p->level--); - return _res; -} - -// Left-recursive -// t_primary: -// | t_primary '.' NAME &t_lookahead -// | t_primary '[' slices ']' &t_lookahead -// | t_primary genexp &t_lookahead -// | t_primary '(' arguments? ')' &t_lookahead -// | atom &t_lookahead -static expr_ty t_primary_raw(Parser *); -static expr_ty -t_primary_rule(Parser *p) -{ - D(p->level++); - expr_ty _res = NULL; - if (_PyPegen_is_memoized(p, t_primary_type, &_res)) { - D(p->level--); - return _res; - } - int _mark = p->mark; - int _resmark = p->mark; - while (1) { - int tmpvar_8 = _PyPegen_update_memo(p, _mark, t_primary_type, _res); - if (tmpvar_8) { - D(p->level--); - return _res; - } - p->mark = _mark; - void *_raw = t_primary_raw(p); - if (_raw == NULL || p->mark <= _resmark) - break; - _resmark = p->mark; - _res = _raw; - } - p->mark = _resmark; - D(p->level--); - return _res; -} -static expr_ty -t_primary_raw(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME &t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); - Token * _literal; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - && - (b = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(1, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Attribute ( a , b -> v . Name . id , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' 
NAME &t_lookahead")); - } - { // t_primary '[' slices ']' &t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); - Token * _literal; - Token * _literal_1; - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = slices_rule(p)) // slices - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - && - _PyPegen_lookahead(1, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Subscript ( a , b , Load , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &t_lookahead")); - } - { // t_primary genexp &t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); - expr_ty a; - expr_ty b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (b = genexp_rule(p)) // genexp - && - _PyPegen_lookahead(1, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary genexp &t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( a , CHECK ( _PyPegen_singleton_seq ( p , b ) ) , NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary genexp &t_lookahead")); - } - { // t_primary '(' arguments? ')' &t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); - Token * _literal; - Token * _literal_1; - expr_ty a; - void *b; - if ( - (a = t_primary_rule(p)) // t_primary - && - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (b = arguments_rule(p), 1) // arguments? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - && - _PyPegen_lookahead(1, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '(' arguments? 
')' &t_lookahead")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Call ( a , ( b ) ? ( ( expr_ty ) b ) -> v . Call . args : NULL , ( b ) ? ( ( expr_ty ) b ) -> v . Call . keywords : NULL , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '(' arguments? ')' &t_lookahead")); - } - { // atom &t_lookahead - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_primary[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); - expr_ty a; - if ( - (a = atom_rule(p)) // atom - && - _PyPegen_lookahead(1, t_lookahead_rule, p) - ) - { - D(fprintf(stderr, "%*c+ t_primary[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "atom &t_lookahead")); - _res = a; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_primary[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "atom &t_lookahead")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// t_lookahead: '(' | '[' | '.' -static void * -t_lookahead_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '(' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - ) - { - D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); - } - { // '[' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - ) - { - D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); - } - { // '.' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_lookahead[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - ) - { - D(fprintf(stderr, "%*c+ t_lookahead[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_lookahead[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'.'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// t_atom: NAME | '(' target ')' | '(' targets? ')' | '[' targets? ']' -static expr_ty -t_atom_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - expr_ty _res = NULL; - int _mark = p->mark; - if (p->mark == p->fill && _PyPegen_fill_token(p) < 0) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - int _start_lineno = p->tokens[_mark]->lineno; - UNUSED(_start_lineno); // Only used by EXTRA macro - int _start_col_offset = p->tokens[_mark]->col_offset; - UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); - expr_ty a; - if ( - (a = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); - _res = _PyPegen_set_expr_context ( p , a , Store ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); - } - { // '(' target ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); - Token * _literal; - Token * _literal_1; - expr_ty a; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (a = target_rule(p)) // target - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' target ')'")); - _res = _PyPegen_set_expr_context ( p , a , Store ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' target ')'")); - } - { // '(' targets? ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); - Token * _literal; - Token * _literal_1; - void *b; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (b = targets_rule(p), 1) // targets? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' targets? ')'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_Tuple ( b , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' targets? ')'")); - } - { // '[' targets? 
']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); - Token * _literal; - Token * _literal_1; - void *b; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - && - (b = targets_rule(p), 1) // targets? - && - (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' targets? ']'")); - Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); - if (_token == NULL) { - D(p->level--); - return NULL; - } - int _end_lineno = _token->end_lineno; - UNUSED(_end_lineno); // Only used by EXTRA macro - int _end_col_offset = _token->end_col_offset; - UNUSED(_end_col_offset); // Only used by EXTRA macro - _res = _Py_List ( b , Store , EXTRA ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' targets? ']'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// incorrect_arguments: -// | args ',' '*' -// | expression for_if_clauses ',' [args | expression for_if_clauses] -// | args for_if_clauses -// | args ',' expression for_if_clauses -// | args ',' args -static void * -incorrect_arguments_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // args ',' '*' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); - Token * _literal; - Token * _literal_1; - expr_ty args_var; - if ( - (args_var = args_rule(p)) // args - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_literal_1 = _PyPegen_expect_token(p, 16)) // token='*' - ) - { - D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' '*'")); - _res = RAISE_SYNTAX_ERROR ( "iterable argument unpacking follows keyword argument unpacking" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' '*'")); - } - { // expression for_if_clauses ',' [args | expression for_if_clauses] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); - Token * _literal; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - asdl_seq* for_if_clauses_var; - if ( - (a = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] - ) - { - D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); - } - { // args for_if_clauses - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); - expr_ty a; - asdl_seq* for_if_clauses_var; - if ( - (a = args_rule(p)) // args - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args for_if_clauses")); - _res = _PyPegen_nonparen_genexp_in_call ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args for_if_clauses")); - } - { // args ',' expression for_if_clauses - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); - Token * _literal; - expr_ty a; - expr_ty args_var; - asdl_seq* for_if_clauses_var; - if ( - (args_var = args_rule(p)) // args - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (a = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' expression for_if_clauses")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "Generator expression must be parenthesized" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "args ',' expression for_if_clauses")); - } - { // args ',' args - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> incorrect_arguments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args ',' args")); - Token * _literal; - expr_ty a; - expr_ty args_var; - if ( - (a = args_rule(p)) // args - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (args_var = args_rule(p)) // args - ) - { - D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args ',' args")); - _res = _PyPegen_arguments_parsing_error ( p , a ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s incorrect_arguments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args ',' args")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_kwarg: expression '=' -static void * -invalid_kwarg_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // expression '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_kwarg[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression '='")); - Token * _literal; - expr_ty a; - if ( - (a = expression_rule(p)) // expression - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ invalid_kwarg[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression '='")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "expression cannot contain assignment, perhaps you meant \"==\"?" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_kwarg[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression '='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_named_expression: expression ':=' expression -static void * -invalid_named_expression_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // expression ':=' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); - Token * _literal; - expr_ty a; - expr_ty expression_var; - if ( - (a = expression_rule(p)) // expression - && - (_literal = _PyPegen_expect_token(p, 53)) // token=':=' - && - (expression_var = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ invalid_named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':=' expression")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot use assignment expressions with %s" , _PyPegen_get_expr_name ( a ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ':=' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_assignment: -// | list ':' -// | tuple ':' -// | star_named_expression ',' star_named_expressions* ':' -// | expression ':' expression ['=' annotated_rhs] -// | ((star_targets '='))* star_expressions '=' -// | ((star_targets '='))* yield_expr '=' -// | star_expressions augassign (yield_expr | star_expressions) -static void * -invalid_assignment_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // list ':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':'")); - Token * _literal; - expr_ty a; - if ( - (a = list_rule(p)) // list - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':'")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':'")); - } - { // tuple ':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':'")); - Token * _literal; - expr_ty a; - if ( - (a = tuple_rule(p)) // tuple - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':'")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple ':'")); - } - { // star_named_expression ',' star_named_expressions* ':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); - Token * _literal; - Token * _literal_1; - asdl_seq * _loop0_126_var; - expr_ty a; - if ( - (a = star_named_expression_rule(p)) // star_named_expression - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* - && - (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); - } - { // expression ':' expression ['=' annotated_rhs] - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); - Token * _literal; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings - expr_ty a; - expr_ty expression_var; - if ( - (a = expression_rule(p)) // expression - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); - } - { // ((star_targets '='))* star_expressions '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); - Token * _literal; - asdl_seq * _loop0_128_var; - expr_ty a; - if ( - (_loop0_128_var = _loop0_128_rule(p)) // ((star_targets '='))* - && - (a = star_expressions_rule(p)) // star_expressions - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))* star_expressions '='")); - } - { // ((star_targets '='))* yield_expr '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); - Token * _literal; - asdl_seq * _loop0_129_var; - expr_ty a; - if ( - (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* - && - (a = yield_expr_rule(p)) // yield_expr - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "assignment to yield expression not possible" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "((star_targets '='))* yield_expr '='")); - } - { // star_expressions augassign (yield_expr | star_expressions) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_130_var; - expr_ty a; - AugOperator* augassign_var; - if ( - (a = star_expressions_rule(p)) // star_expressions - && - (augassign_var = augassign_rule(p)) // augassign - && - (_tmp_130_var = _tmp_130_rule(p)) // yield_expr | star_expressions - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "'%s' is an illegal expression for augmented assignment" , _PyPegen_get_expr_name ( a ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_block: NEWLINE !INDENT -static void * -invalid_block_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // NEWLINE !INDENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_block[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, INDENT) // token=INDENT - ) - { - D(fprintf(stderr, "%*c+ invalid_block[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE !INDENT")); - _res = RAISE_INDENTATION_ERROR ( "expected an indented block" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_block[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE !INDENT")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_comprehension: ('[' | '(' | '{') starred_expression for_if_clauses -static void * -invalid_comprehension_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ('[' | '(' | '{') starred_expression for_if_clauses - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_131_var; - expr_ty a; - asdl_seq* for_if_clauses_var; - if ( - (_tmp_131_var = _tmp_131_rule(p)) // '[' | '(' | '{' - && - (a = starred_expression_rule(p)) // starred_expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - D(fprintf(stderr, "%*c+ invalid_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "iterable unpacking cannot be used in comprehension" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_comprehension[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_dict_comprehension: '{' '**' bitwise_or for_if_clauses '}' -static void * -invalid_dict_comprehension_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '{' '**' bitwise_or for_if_clauses '}' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_dict_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); - Token * _literal; - Token * _literal_1; - Token * a; - expr_ty bitwise_or_var; - asdl_seq* for_if_clauses_var; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - && - (a = _PyPegen_expect_token(p, 35)) // token='**' - && - (bitwise_or_var = bitwise_or_rule(p)) // bitwise_or - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - && - (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' - ) - { - D(fprintf(stderr, "%*c+ invalid_dict_comprehension[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "dict unpacking cannot be used in dict comprehension" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_dict_comprehension[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'{' '**' bitwise_or for_if_clauses '}'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_parameters: -// | param_no_default* (slash_with_default | param_with_default+) param_no_default -static void * -invalid_parameters_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // param_no_default* (slash_with_default | param_with_default+) param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - asdl_seq * _loop0_132_var; - void *_tmp_133_var; - arg_ty param_no_default_var; - if ( - (_loop0_132_var = _loop0_132_rule(p)) // param_no_default* - && - (_tmp_133_var = _tmp_133_rule(p)) // slash_with_default | param_with_default+ - && - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - D(fprintf(stderr, "%*c+ invalid_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_lambda_parameters: -// | lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default -static void * -invalid_lambda_parameters_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); - asdl_seq * _loop0_134_var; - void *_tmp_135_var; - arg_ty lambda_param_no_default_var; - if ( - (_loop0_134_var = _loop0_134_rule(p)) // lambda_param_no_default* - && - (_tmp_135_var = _tmp_135_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ - && - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - D(fprintf(stderr, "%*c+ invalid_lambda_parameters[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); - _res = RAISE_SYNTAX_ERROR ( "non-default argument follows default argument" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_lambda_parameters[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_star_etc: '*' (')' | ',' (')' | '**')) | '*' ',' TYPE_COMMENT -static void * -invalid_star_etc_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '*' (')' | ',' (')' | '**')) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - Token * _literal; - void *_tmp_136_var; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (_tmp_136_var = _tmp_136_rule(p)) // ')' | ',' (')' | '**') - ) - { - D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); - } - { // '*' ',' TYPE_COMMENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); - Token * _literal; - Token * _literal_1; - Token * type_comment_var; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' - && - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - ) - { - D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' ',' TYPE_COMMENT")); - _res = RAISE_SYNTAX_ERROR ( "bare * has associated type comment" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'*' ',' TYPE_COMMENT")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_lambda_star_etc: '*' (':' | ',' (':' | '**')) -static void * -invalid_lambda_star_etc_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '*' (':' | ',' (':' | '**')) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); - Token * _literal; - void *_tmp_137_var; - if ( - (_literal = _PyPegen_expect_token(p, 16)) // token='*' - && - (_tmp_137_var = _tmp_137_rule(p)) // ':' | ',' (':' | '**') - ) - { - D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); - _res = RAISE_SYNTAX_ERROR ( "named arguments must follow bare *" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_lambda_star_etc[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_double_type_comments: TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT -static void * -invalid_double_type_comments_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_double_type_comments[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); - Token * indent_var; - Token * newline_var; - Token * newline_var_1; - Token * type_comment_var; - Token * type_comment_var_1; - if ( - (type_comment_var = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - (type_comment_var_1 = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' - && - (newline_var_1 = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' - ) - { - D(fprintf(stderr, "%*c+ invalid_double_type_comments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); - _res = RAISE_SYNTAX_ERROR ( "Cannot have two type comments on def" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_double_type_comments[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_del_target: star_expression &del_target_end -static void * -invalid_del_target_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_expression &del_target_end - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); - expr_ty a; - if ( - (a = star_expression_rule(p)) // star_expression - && - _PyPegen_lookahead(1, del_target_end_rule, p) - ) - { - D(fprintf(stderr, "%*c+ invalid_del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression &del_target_end")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// invalid_import_from_targets: import_from_as_names ',' -static void * -invalid_import_from_targets_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // import_from_as_names ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_import_from_targets[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); - Token * _literal; - asdl_seq* import_from_as_names_var; - if ( - (import_from_as_names_var = import_from_as_names_rule(p)) // import_from_as_names - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ invalid_import_from_targets[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_names ','")); - _res = RAISE_SYNTAX_ERROR ( "trailing comma not allowed without surrounding parentheses" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_import_from_targets[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_names ','")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_1: NEWLINE -static asdl_seq * -_loop0_1_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_1[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token * newline_var; - while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = newline_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_1[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_1_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_2: NEWLINE -static asdl_seq * -_loop0_2_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_2[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token * newline_var; - while ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - _res = newline_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_2[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NEWLINE")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_2_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_4: ',' expression -static asdl_seq * -_loop0_4_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_4[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = expression_rule(p)) // expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_4[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_4_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_3: expression _loop0_4 -static asdl_seq * -_gather_3_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // expression _loop0_4 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_3[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_4_rule(p)) // _loop0_4 - ) - { - D(fprintf(stderr, "%*c+ _gather_3[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_4")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_3[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_4")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_6: ',' expression -static asdl_seq * -_loop0_6_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_6[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = expression_rule(p)) // expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_6[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_6_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_5: expression _loop0_6 -static asdl_seq * -_gather_5_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // expression _loop0_6 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_5[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_6_rule(p)) // _loop0_6 - ) - { - D(fprintf(stderr, "%*c+ _gather_5[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_6")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_5[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_6")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_8: ',' expression -static asdl_seq * -_loop0_8_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_8[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = expression_rule(p)) // expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_8[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_8_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_7: expression _loop0_8 -static asdl_seq * -_gather_7_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // expression _loop0_8 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_7[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_8_rule(p)) // _loop0_8 - ) - { - D(fprintf(stderr, "%*c+ _gather_7[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_8")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_7[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_8")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_10: ',' expression -static asdl_seq * -_loop0_10_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_10[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = expression_rule(p)) // expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_10[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_10_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_9: expression _loop0_10 -static asdl_seq * -_gather_9_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // expression _loop0_10 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_9[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = expression_rule(p)) // expression - && - (seq = _loop0_10_rule(p)) // _loop0_10 - ) - { - D(fprintf(stderr, "%*c+ _gather_9[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression _loop0_10")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_9[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression _loop0_10")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_11: statement -static asdl_seq * -_loop1_11_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // statement - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_11[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "statement")); - asdl_seq* statement_var; - while ( - (statement_var = statement_rule(p)) // statement - ) - { - _res = statement_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_11[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "statement")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_11_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_13: ';' small_stmt -static asdl_seq * -_loop0_13_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ';' small_stmt - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_13[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';' small_stmt")); - Token * _literal; - stmt_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 13)) // token=';' - && - (elem = small_stmt_rule(p)) // small_stmt - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_13[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "';' small_stmt")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_13_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_12: small_stmt _loop0_13 -static asdl_seq * -_gather_12_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // small_stmt _loop0_13 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_12[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); - stmt_ty elem; - asdl_seq * seq; - if ( - (elem = small_stmt_rule(p)) // small_stmt - && - (seq = _loop0_13_rule(p)) // _loop0_13 - ) - { - D(fprintf(stderr, "%*c+ _gather_12[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "small_stmt _loop0_13")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_12[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "small_stmt _loop0_13")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_14: 'import' | 'from' -static void * -_tmp_14_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'import' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'import'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 513)) // token='import' - ) - { - D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'import'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'import'")); - } - { // 'from' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_14[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 514)) // token='from' - ) - { - D(fprintf(stderr, "%*c+ _tmp_14[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_14[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_15: 'def' | '@' | ASYNC -static void * -_tmp_15_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'def' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' - ) - { - D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'def'")); - } - { // '@' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 49)) // token='@' - ) - { - D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); - } - { // ASYNC - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - ) - { - D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_15[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_16: 'class' | '@' -static void * -_tmp_16_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'class' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 523)) // token='class' - ) - { - D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'class'")); - } - { // '@' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 49)) // token='@' - ) - { - D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_16[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_17: 'with' | ASYNC -static void * -_tmp_17_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'with' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'with'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 519)) // token='with' - ) - { - D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'with'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'with'")); - } - { // ASYNC - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_17[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - ) - { - D(fprintf(stderr, "%*c+ _tmp_17[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_17[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_18: 'for' | ASYNC -static void * -_tmp_18_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'for' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for'")); - Token * _keyword; - if ( - (_keyword = _PyPegen_expect_token(p, 517)) // token='for' - ) - { - D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for'")); - _res = _keyword; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for'")); - } - { // ASYNC - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_18[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC")); - Token * async_var; - if ( - (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' - ) - { - D(fprintf(stderr, "%*c+ _tmp_18[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC")); - _res = async_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_18[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_19: '=' annotated_rhs -static void * -_tmp_19_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '=' annotated_rhs - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_19[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - Token * _literal; - expr_ty d; - if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (d = annotated_rhs_rule(p)) // annotated_rhs - ) - { - D(fprintf(stderr, "%*c+ _tmp_19[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - _res = d; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_19[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_20: '(' single_target ')' | single_subscript_attribute_target -static void * -_tmp_20_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '(' single_target ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); - Token * _literal; - Token * _literal_1; - expr_ty b; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (b = single_target_rule(p)) // single_target - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' single_target ')'")); - _res = b; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' single_target ')'")); - } - { // single_subscript_attribute_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_20[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); - expr_ty single_subscript_attribute_target_var; - if ( - (single_subscript_attribute_target_var = single_subscript_attribute_target_rule(p)) // single_subscript_attribute_target - ) - { - D(fprintf(stderr, "%*c+ _tmp_20[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_subscript_attribute_target")); - _res = single_subscript_attribute_target_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_20[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_subscript_attribute_target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_21: '=' annotated_rhs -static void * -_tmp_21_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '=' annotated_rhs - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_21[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - Token * _literal; - expr_ty d; - if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (d = annotated_rhs_rule(p)) // annotated_rhs - ) - { - D(fprintf(stderr, "%*c+ _tmp_21[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - _res = d; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_21[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'=' annotated_rhs")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_22: (star_targets '=') -static asdl_seq * -_loop1_22_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (star_targets '=') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_138_var; - while ( - (_tmp_138_var = _tmp_138_rule(p)) // star_targets '=' - ) - { - _res = _tmp_138_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_22[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_22_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_23: yield_expr | star_expressions -static void * -_tmp_23_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); - } - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_23[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ _tmp_23[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_23[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_24: yield_expr | star_expressions -static void * -_tmp_24_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); - } - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_24[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ _tmp_24[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_24[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_26: ',' NAME -static asdl_seq * -_loop0_26_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_26[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = _PyPegen_name_token(p)) // NAME - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_26[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_26_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_25: NAME _loop0_26 -static asdl_seq * -_gather_25_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // NAME _loop0_26 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_25[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = _PyPegen_name_token(p)) // NAME - && - (seq = _loop0_26_rule(p)) // _loop0_26 - ) - { - D(fprintf(stderr, "%*c+ _gather_25[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_26")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_25[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_26")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_28: ',' NAME -static asdl_seq * -_loop0_28_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_28[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' NAME")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = _PyPegen_name_token(p)) // NAME - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_28[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' NAME")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_28_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_27: NAME _loop0_28 -static asdl_seq * -_gather_27_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // NAME _loop0_28 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_27[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = _PyPegen_name_token(p)) // NAME - && - (seq = _loop0_28_rule(p)) // _loop0_28 - ) - { - D(fprintf(stderr, "%*c+ _gather_27[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME _loop0_28")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_27[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME _loop0_28")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_29: ',' expression -static void * -_tmp_29_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_29[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty z; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (z = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_29[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_29[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_30: ('.' | '...') -static asdl_seq * -_loop0_30_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('.' | '...') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_139_var; - while ( - (_tmp_139_var = _tmp_139_rule(p)) // '.' | '...' 
- ) - { - _res = _tmp_139_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_31: ('.' | '...') -static asdl_seq * -_loop1_31_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('.' | '...') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_140_var; - while ( - (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' - ) - { - _res = _tmp_140_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_31[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' 
| '...')")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_33: ',' import_from_as_name -static asdl_seq * -_loop0_33_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' import_from_as_name - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); - Token * _literal; - alias_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = import_from_as_name_rule(p)) // import_from_as_name - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_33[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' import_from_as_name")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_32: import_from_as_name _loop0_33 -static asdl_seq * -_gather_32_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // import_from_as_name _loop0_33 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); - alias_ty elem; - asdl_seq * seq; - if ( - (elem = import_from_as_name_rule(p)) // import_from_as_name - && - (seq = _loop0_33_rule(p)) // _loop0_33 - ) - { - D(fprintf(stderr, "%*c+ _gather_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_32[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "import_from_as_name _loop0_33")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_34: 'as' NAME -static void * -_tmp_34_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'as' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' - && - (z = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_36: ',' dotted_as_name -static asdl_seq * -_loop0_36_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' dotted_as_name - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); - Token * _literal; - alias_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = dotted_as_name_rule(p)) // dotted_as_name - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' dotted_as_name")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_35: dotted_as_name _loop0_36 -static asdl_seq * -_gather_35_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // dotted_as_name _loop0_36 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); - alias_ty elem; - asdl_seq * seq; - if ( - (elem = dotted_as_name_rule(p)) // dotted_as_name - && - (seq = _loop0_36_rule(p)) // _loop0_36 - ) - { - D(fprintf(stderr, "%*c+ _gather_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_35[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_36")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_37: 'as' NAME -static void * -_tmp_37_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'as' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' - && - (z = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ _tmp_37[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_37[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_39: ',' with_item -static asdl_seq * -_loop0_39_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' with_item - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); - Token * _literal; - withitem_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = with_item_rule(p)) // with_item - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_38: with_item _loop0_39 -static asdl_seq * -_gather_38_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // with_item _loop0_39 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); - withitem_ty elem; - asdl_seq * seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_39_rule(p)) // _loop0_39 - ) - { - D(fprintf(stderr, "%*c+ _gather_38[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_38[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_39")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_41: ',' with_item -static asdl_seq * -_loop0_41_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' with_item - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); - Token * _literal; - withitem_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = with_item_rule(p)) // with_item - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_40: with_item _loop0_41 -static asdl_seq * -_gather_40_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // with_item _loop0_41 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); - withitem_ty elem; - asdl_seq * seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_41_rule(p)) // _loop0_41 - ) - { - D(fprintf(stderr, "%*c+ _gather_40[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_40[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_41")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_43: ',' with_item -static asdl_seq * -_loop0_43_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' with_item - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); - Token * _literal; - withitem_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = with_item_rule(p)) // with_item - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_43[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_42: with_item _loop0_43 -static asdl_seq * -_gather_42_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // with_item _loop0_43 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); - withitem_ty elem; - asdl_seq * seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_43_rule(p)) // _loop0_43 - ) - { - D(fprintf(stderr, "%*c+ _gather_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_42[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_43")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_45: ',' with_item -static asdl_seq * -_loop0_45_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' with_item - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); - Token * _literal; - withitem_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = with_item_rule(p)) // with_item - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' with_item")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_44: with_item _loop0_45 -static asdl_seq * -_gather_44_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // with_item _loop0_45 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); - withitem_ty elem; - asdl_seq * seq; - if ( - (elem = with_item_rule(p)) // with_item - && - (seq = _loop0_45_rule(p)) // _loop0_45 - ) - { - D(fprintf(stderr, "%*c+ _gather_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_44[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "with_item _loop0_45")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_46: 'as' target -static void * -_tmp_46_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'as' target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' target")); - Token * _keyword; - expr_ty t; - if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' - && - (t = target_rule(p)) // target - ) - { - D(fprintf(stderr, "%*c+ _tmp_46[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' target")); - _res = t; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_46[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_47: except_block -static asdl_seq * -_loop1_47_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // except_block - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); - excepthandler_ty except_block_var; - while ( - (except_block_var = except_block_rule(p)) // except_block - ) - { - _res = except_block_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_48: 'as' NAME -static void * -_tmp_48_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'as' NAME - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' - && - (z = _PyPegen_name_token(p)) // NAME - ) - { - D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_49: 'from' expression -static void * -_tmp_49_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'from' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 514)) // token='from' - && - (z = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'from' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_50: '->' expression -static void * -_tmp_50_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '->' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); - Token * _literal; - expr_ty z; - if ( - (_literal = _PyPegen_expect_token(p, 51)) // token='->' - && - (z = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_50[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'->' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_51: '->' expression -static void * -_tmp_51_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '->' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); - Token * _literal; - expr_ty z; - if ( - (_literal = _PyPegen_expect_token(p, 51)) // token='->' - && - (z = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_51[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_52: NEWLINE INDENT -static void * -_tmp_52_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // NEWLINE INDENT - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); - Token * indent_var; - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - && - (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' - ) - { - D(fprintf(stderr, "%*c+ _tmp_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); - _res = _PyPegen_dummy_name(p, newline_var, indent_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_52[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_53: param_no_default -static asdl_seq * -_loop0_53_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_54: param_with_default -static asdl_seq * -_loop0_54_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_55: param_with_default -static asdl_seq * -_loop0_55_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_56: param_no_default -static asdl_seq * -_loop1_56_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_57: param_with_default -static asdl_seq * -_loop0_57_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_58: param_with_default -static asdl_seq * -_loop1_58_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_58[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_59: param_no_default -static asdl_seq * -_loop1_59_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_60: param_no_default -static asdl_seq * -_loop1_60_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_61: param_no_default -static asdl_seq * -_loop0_61_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_61[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_62: param_with_default -static asdl_seq * -_loop1_62_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_63: param_no_default -static asdl_seq * -_loop0_63_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_63[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_64: param_with_default -static asdl_seq * -_loop1_64_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_65: param_maybe_default -static asdl_seq * -_loop0_65_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_maybe_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); - NameDefaultPair* param_maybe_default_var; - while ( - (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default - ) - { - _res = param_maybe_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_65[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_66: param_maybe_default -static asdl_seq * -_loop1_66_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_maybe_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); - NameDefaultPair* param_maybe_default_var; - while ( - (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default - ) - { - _res = param_maybe_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_66[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_67: ('@' named_expression NEWLINE) -static asdl_seq * -_loop1_67_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('@' named_expression NEWLINE) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_141_var; - while ( - (_tmp_141_var = _tmp_141_rule(p)) // '@' named_expression NEWLINE - ) - { - _res = _tmp_141_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_67[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('@' named_expression NEWLINE)")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_68: '(' arguments? ')' -static void * -_tmp_68_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '(' arguments? ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); - Token * _literal; - Token * _literal_1; - void *z; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - && - (z = arguments_rule(p), 1) // arguments? - && - (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? ')'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_70: ',' star_expression -static asdl_seq * -_loop0_70_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' star_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = star_expression_rule(p)) // star_expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_70[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_69: star_expression _loop0_70 -static asdl_seq * -_gather_69_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // star_expression _loop0_70 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = star_expression_rule(p)) // star_expression - && - (seq = _loop0_70_rule(p)) // _loop0_70 - ) - { - D(fprintf(stderr, "%*c+ _gather_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_69[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression _loop0_70")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_71: (',' star_expression) -static asdl_seq * -_loop1_71_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (',' star_expression) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_142_var; - while ( - (_tmp_142_var = _tmp_142_rule(p)) // ',' star_expression - ) - { - _res = _tmp_142_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_71[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' star_expression)")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_73: ',' star_named_expression -static asdl_seq * -_loop0_73_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' star_named_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = star_named_expression_rule(p)) // star_named_expression - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_named_expression")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_72: star_named_expression _loop0_73 -static asdl_seq * -_gather_72_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // star_named_expression _loop0_73 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = star_named_expression_rule(p)) // star_named_expression - && - (seq = _loop0_73_rule(p)) // _loop0_73 - ) - { - D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression _loop0_73")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_74: (',' expression) -static asdl_seq * -_loop1_74_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (',' expression) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_143_var; - while ( - (_tmp_143_var = _tmp_143_rule(p)) // ',' expression - ) - { - _res = _tmp_143_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_74[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' expression)")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_75: lambda_param_no_default -static asdl_seq * -_loop0_75_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_76: lambda_param_with_default -static asdl_seq * -_loop0_76_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_77: lambda_param_with_default -static asdl_seq * -_loop0_77_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_78: lambda_param_no_default -static asdl_seq * -_loop1_78_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_78[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_79: lambda_param_with_default -static asdl_seq * -_loop0_79_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_80: lambda_param_with_default -static asdl_seq * -_loop1_80_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_81: lambda_param_no_default -static asdl_seq * -_loop1_81_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_82: lambda_param_no_default -static asdl_seq * -_loop1_82_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_83: lambda_param_no_default -static asdl_seq * -_loop0_83_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_84: lambda_param_with_default -static asdl_seq * -_loop1_84_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_85: lambda_param_no_default -static asdl_seq * -_loop0_85_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_85[%d-%d]: %s failed!\n", p->level, ' ', - 
p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_86: lambda_param_with_default -static asdl_seq * -_loop1_86_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_87: lambda_param_maybe_default -static asdl_seq * -_loop0_87_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_maybe_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); - NameDefaultPair* lambda_param_maybe_default_var; - while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default - ) - { - _res = lambda_param_maybe_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, 
"%*c%s _loop0_87[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_88: lambda_param_maybe_default -static asdl_seq * -_loop1_88_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_maybe_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); - NameDefaultPair* lambda_param_maybe_default_var; - while ( - (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default - ) - { - _res = lambda_param_maybe_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_88[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_89: ('or' conjunction) -static asdl_seq * -_loop1_89_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('or' conjunction) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_144_var; - while ( - (_tmp_144_var = _tmp_144_rule(p)) // 'or' conjunction - ) - { - _res = _tmp_144_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_89[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_90: ('and' inversion) -static asdl_seq * -_loop1_90_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('and' inversion) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_145_var; - while ( - (_tmp_145_var = _tmp_145_rule(p)) // 'and' inversion - ) - { - _res = _tmp_145_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_90[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('and' inversion)")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_91: compare_op_bitwise_or_pair -static asdl_seq * -_loop1_91_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // compare_op_bitwise_or_pair - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); - CmpopExprPair* compare_op_bitwise_or_pair_var; - while ( - (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair - ) - { - _res = compare_op_bitwise_or_pair_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_91[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_92: '!=' -static void * -_tmp_92_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '!=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); - Token * tok; - if ( - (tok = _PyPegen_expect_token(p, 28)) // token='!=' - ) - { - D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); - _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'!='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_94: ',' slice -static asdl_seq * -_loop0_94_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' slice - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' slice")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = slice_rule(p)) // slice - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_94[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' slice")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_93: slice _loop0_94 -static asdl_seq * -_gather_93_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // slice _loop0_94 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = slice_rule(p)) // slice - && - (seq = _loop0_94_rule(p)) // _loop0_94 - ) - { - D(fprintf(stderr, "%*c+ _gather_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_93[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice _loop0_94")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_95: ':' expression? -static void * -_tmp_95_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ':' expression? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); - Token * _literal; - void *d; - if ( - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (d = expression_rule(p), 1) // expression? 
- ) - { - D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); - _res = d; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_96: tuple | group | genexp -static void * -_tmp_96_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // tuple - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); - expr_ty tuple_var; - if ( - (tuple_var = tuple_rule(p)) // tuple - ) - { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); - _res = tuple_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); - } - { // group - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); - expr_ty group_var; - if ( - (group_var = group_rule(p)) // group - ) - { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); - _res = group_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group")); - } - { // genexp - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); - expr_ty genexp_var; - if ( - (genexp_var = genexp_rule(p)) // genexp - ) - { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); - _res = genexp_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "genexp")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_97: list | listcomp -static void * -_tmp_97_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // list - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); - expr_ty list_var; - if ( - (list_var = list_rule(p)) // list - ) - { - D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); - _res = list_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "list")); - } - { // listcomp - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); - expr_ty listcomp_var; - if ( - (listcomp_var = listcomp_rule(p)) // listcomp - ) - { - D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); - _res = listcomp_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_98: dict | set | dictcomp | setcomp -static void * -_tmp_98_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // dict - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); - expr_ty dict_var; - if ( - (dict_var = dict_rule(p)) // dict - ) - { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); - _res = dict_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dict")); - } - { // set - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); - expr_ty set_var; - if ( - (set_var = set_rule(p)) // set - ) - { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); - _res = set_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set")); - } - { // dictcomp - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); - expr_ty dictcomp_var; - if ( - (dictcomp_var = dictcomp_rule(p)) // dictcomp - ) - { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); - _res = dictcomp_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp")); - } - { // setcomp - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); - expr_ty setcomp_var; - if ( - (setcomp_var = setcomp_rule(p)) // setcomp - ) - { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); - _res = setcomp_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "setcomp")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_99: STRING -static asdl_seq * -_loop1_99_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // STRING - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); - expr_ty string_var; - while ( - (string_var = _PyPegen_string_token(p)) // STRING - ) - { - _res = string_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_99[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "STRING")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_100: star_named_expression ',' star_named_expressions? -static void * -_tmp_100_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_named_expression ',' star_named_expressions? - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - Token * _literal; - expr_ty y; - void *z; - if ( - (y = star_named_expression_rule(p)) // star_named_expression - && - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (z = star_named_expressions_rule(p), 1) // star_named_expressions? - ) - { - D(fprintf(stderr, "%*c+ _tmp_100[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - _res = _PyPegen_seq_insert_in_front ( p , y , z ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_100[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_101: yield_expr | named_expression -static void * -_tmp_101_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); - } - { // named_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); - expr_ty named_expression_var; - if ( - (named_expression_var = named_expression_rule(p)) // named_expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); - _res = named_expression_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_103: ',' double_starred_kvpair -static asdl_seq * -_loop0_103_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' double_starred_kvpair - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); - Token * _literal; - KeyValuePair* elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_102: double_starred_kvpair _loop0_103 -static asdl_seq * -_gather_102_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // double_starred_kvpair _loop0_103 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); - KeyValuePair* elem; - asdl_seq * seq; - if ( - (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair - && - (seq = _loop0_103_rule(p)) // _loop0_103 - ) - { - D(fprintf(stderr, "%*c+ _gather_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_102[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_103")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_104: for_if_clause -static asdl_seq * -_loop1_104_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // for_if_clause - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); - comprehension_ty for_if_clause_var; - while ( - (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause - ) - { - _res = for_if_clause_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_105: ('if' disjunction) -static asdl_seq * -_loop0_105_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('if' disjunction) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_146_var; - while ( - (_tmp_146_var = _tmp_146_rule(p)) // 'if' disjunction - ) - { - _res = _tmp_146_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_105[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_106: ('if' disjunction) -static asdl_seq * -_loop0_106_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ('if' disjunction) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_147_var; - while ( - (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction - ) - { - _res = _tmp_147_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_107: ',' args -static void * -_tmp_107_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' args - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); - Token * _literal; - expr_ty c; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (c = args_rule(p)) // args - ) - { - D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' args")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_108: ',' args -static void * -_tmp_108_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' args - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); - Token * _literal; - expr_ty c; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (c = args_rule(p)) // args - ) - { - D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' args")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_110: ',' kwarg_or_starred -static asdl_seq * -_loop0_110_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' kwarg_or_starred - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); - Token * _literal; - KeywordOrStarred* elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_110[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_109: kwarg_or_starred _loop0_110 -static asdl_seq * -_gather_109_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // kwarg_or_starred _loop0_110 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); - KeywordOrStarred* elem; - asdl_seq * seq; - if ( - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - && - (seq = _loop0_110_rule(p)) // _loop0_110 - ) - { - D(fprintf(stderr, "%*c+ _gather_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_109[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_110")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_112: ',' kwarg_or_double_starred -static asdl_seq * -_loop0_112_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' kwarg_or_double_starred - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); - Token * _literal; - KeywordOrStarred* elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_111: kwarg_or_double_starred _loop0_112 -static asdl_seq * -_gather_111_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // kwarg_or_double_starred _loop0_112 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); - KeywordOrStarred* elem; - asdl_seq * seq; - if ( - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - && - (seq = _loop0_112_rule(p)) // _loop0_112 - ) - { - D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_112")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_114: ',' kwarg_or_starred -static asdl_seq * -_loop0_114_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' kwarg_or_starred - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); - Token * _literal; - KeywordOrStarred* elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_113: kwarg_or_starred _loop0_114 -static asdl_seq * -_gather_113_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // kwarg_or_starred _loop0_114 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); - KeywordOrStarred* elem; - asdl_seq * seq; - if ( - (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred - && - (seq = _loop0_114_rule(p)) // _loop0_114 - ) - { - D(fprintf(stderr, "%*c+ _gather_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_113[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_114")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_116: ',' kwarg_or_double_starred -static asdl_seq * -_loop0_116_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' kwarg_or_double_starred - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); - Token * _literal; - KeywordOrStarred* elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_115: kwarg_or_double_starred _loop0_116 -static asdl_seq * -_gather_115_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // kwarg_or_double_starred _loop0_116 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); - KeywordOrStarred* elem; - asdl_seq * seq; - if ( - (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred - && - (seq = _loop0_116_rule(p)) // _loop0_116 - ) - { - D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_116")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_117: (',' star_target) -static asdl_seq * -_loop0_117_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (',' star_target) - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_148_var; - while ( - (_tmp_148_var = _tmp_148_rule(p)) // ',' star_target - ) - { - _res = _tmp_148_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_117[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_119: ',' star_target -static asdl_seq * -_loop0_119_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' star_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = star_target_rule(p)) // star_target - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_118: star_target _loop0_119 -static asdl_seq * -_gather_118_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // star_target _loop0_119 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = star_target_rule(p)) // star_target - && - (seq = _loop0_119_rule(p)) // _loop0_119 - ) - { - D(fprintf(stderr, "%*c+ _gather_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_118[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_119")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_120: !'*' star_target -static void * -_tmp_120_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // !'*' star_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); - expr_ty star_target_var; - if ( - _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' - && - (star_target_var = star_target_rule(p)) // star_target - ) - { - D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); - _res = star_target_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "!'*' star_target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_122: ',' del_target -static asdl_seq * -_loop0_122_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' del_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = del_target_rule(p)) // del_target - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_121: del_target _loop0_122 -static asdl_seq * -_gather_121_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // del_target _loop0_122 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = del_target_rule(p)) // del_target - && - (seq = _loop0_122_rule(p)) // _loop0_122 - ) - { - D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "del_target _loop0_122")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_124: ',' target -static asdl_seq * -_loop0_124_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // ',' target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' target")); - Token * _literal; - expr_ty elem; - while ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (elem = target_rule(p)) // target - ) - { - _res = elem; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - PyMem_Free(_children); - D(p->level--); - return NULL; - } - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_124[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' target")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); - D(p->level--); - return _seq; -} - -// _gather_123: target _loop0_124 -static asdl_seq * -_gather_123_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - asdl_seq * _res = NULL; - int _mark = p->mark; - { // target _loop0_124 - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _gather_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); - expr_ty elem; - asdl_seq * seq; - if ( - (elem = target_rule(p)) // target - && - (seq = _loop0_124_rule(p)) // _loop0_124 - ) - { - D(fprintf(stderr, "%*c+ _gather_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); - _res = _PyPegen_seq_insert_in_front(p, elem, seq); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_123[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "target _loop0_124")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_125: args | expression for_if_clauses -static void * -_tmp_125_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // args - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); - expr_ty args_var; - if ( - (args_var = args_rule(p)) // args - ) - { - D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); - _res = args_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); - } - { // expression for_if_clauses - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); - expr_ty expression_var; - asdl_seq* for_if_clauses_var; - if ( - (expression_var = expression_rule(p)) // expression - && - (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses - ) - { - D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); - _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_126: star_named_expressions -static asdl_seq * -_loop0_126_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // star_named_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); - asdl_seq* star_named_expressions_var; - while ( - (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions - ) - { - _res = star_named_expressions_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_126[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expressions")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_127: '=' annotated_rhs -static void * -_tmp_127_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '=' annotated_rhs - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - Token * _literal; - expr_ty annotated_rhs_var; - if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs - ) - { - D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_128: (star_targets '=') -static asdl_seq * -_loop0_128_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (star_targets '=') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_149_var; - while ( - (_tmp_149_var = _tmp_149_rule(p)) // star_targets '=' - ) - { - _res = _tmp_149_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_128_type, _seq); - D(p->level--); - return _seq; -} - -// _loop0_129: (star_targets '=') -static asdl_seq * -_loop0_129_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // (star_targets '=') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_150_var; - while ( - (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' - ) - { - _res = _tmp_150_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_130: yield_expr | star_expressions -static void * -_tmp_130_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // yield_expr - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); - expr_ty yield_expr_var; - if ( - (yield_expr_var = yield_expr_rule(p)) // yield_expr - ) - { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); - _res = yield_expr_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "yield_expr")); - } - { // star_expressions - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); - expr_ty star_expressions_var; - if ( - (star_expressions_var = star_expressions_rule(p)) // star_expressions - ) - { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); - _res = star_expressions_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expressions")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_131: '[' | '(' | '{' -static void * -_tmp_131_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '[' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 9)) // token='[' - ) - { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); - } - { // '(' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 7)) // token='(' - ) - { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'('")); - } - { // '{' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 25)) // token='{' - ) - { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_132: param_no_default -static asdl_seq * -_loop0_132_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); - arg_ty param_no_default_var; - while ( - (param_no_default_var = param_no_default_rule(p)) // param_no_default - ) - { - _res = param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_132_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_133: slash_with_default | param_with_default+ -static void * -_tmp_133_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // slash_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); - SlashWithDefault* slash_with_default_var; - if ( - (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default - ) - { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); - _res = slash_with_default_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slash_with_default")); - } - { // param_with_default+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_151_var; - if ( - (_loop1_151_var = _loop1_151_rule(p)) // param_with_default+ - ) - { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_151_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_134: lambda_param_no_default -static asdl_seq * -_loop0_134_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_no_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); - arg_ty lambda_param_no_default_var; - while ( - (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default - ) - { - _res = lambda_param_no_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_134_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_135: lambda_slash_with_default | lambda_param_with_default+ -static void * -_tmp_135_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // lambda_slash_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); - SlashWithDefault* lambda_slash_with_default_var; - if ( - (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default - ) - { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); - _res = lambda_slash_with_default_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); - } - { // lambda_param_with_default+ - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_152_var; - if ( - (_loop1_152_var = _loop1_152_rule(p)) // lambda_param_with_default+ - ) - { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_152_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default+")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_136: ')' | ',' (')' | '**') -static void * -_tmp_136_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); - } - { // ',' (')' | '**') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - Token * _literal; - void *_tmp_153_var; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_tmp_153_var = _tmp_153_rule(p)) // ')' | '**' - ) - { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_153_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_137: ':' | ',' (':' | '**') -static void * -_tmp_137_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); - } - { // ',' (':' | '**') - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - Token * _literal; - void *_tmp_154_var; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (_tmp_154_var = _tmp_154_rule(p)) // ':' | '**' - ) - { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_138: star_targets '=' -static void * -_tmp_138_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_targets '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - Token * _literal; - expr_ty z; - if ( - (z = star_targets_rule(p)) // star_targets - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_139: '.' | '...' 
-static void * -_tmp_139_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '.' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - ) - { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); - } - { // '...' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 52)) // token='...' - ) - { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_140: '.' | '...' -static void * -_tmp_140_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '.' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 23)) // token='.' - ) - { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); - } - { // '...' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 52)) // token='...' - ) - { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_141: '@' named_expression NEWLINE -static void * -_tmp_141_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '@' named_expression NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); - Token * _literal; - expr_ty f; - Token * newline_var; - if ( - (_literal = _PyPegen_expect_token(p, 49)) // token='@' - && - (f = named_expression_rule(p)) // named_expression - && - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); - _res = f; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_142: ',' star_expression -static void * -_tmp_142_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' star_expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); - Token * _literal; - expr_ty c; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (c = star_expression_rule(p)) // star_expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_143: ',' expression -static void * -_tmp_143_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); - Token * _literal; - expr_ty c; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (c = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' expression")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_144: 'or' conjunction -static void * -_tmp_144_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'or' conjunction - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); - Token * _keyword; - expr_ty c; - if ( - (_keyword = _PyPegen_expect_token(p, 531)) // token='or' - && - (c = conjunction_rule(p)) // conjunction - ) - { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'or' conjunction")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_145: 'and' inversion -static void * -_tmp_145_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'and' inversion - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); - Token * _keyword; - expr_ty c; - if ( - (_keyword = _PyPegen_expect_token(p, 532)) // token='and' - && - (c = inversion_rule(p)) // inversion - ) - { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_146: 'if' disjunction -static void * -_tmp_146_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'if' disjunction - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 510)) // token='if' - && - (z = disjunction_rule(p)) // disjunction - ) - { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_147: 'if' disjunction -static void * -_tmp_147_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // 'if' disjunction - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); - Token * _keyword; - expr_ty z; - if ( - (_keyword = _PyPegen_expect_token(p, 510)) // token='if' - && - (z = disjunction_rule(p)) // disjunction - ) - { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); - _res = z; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_148: ',' star_target -static void * -_tmp_148_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ',' star_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); - Token * _literal; - expr_ty c; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - && - (c = star_target_rule(p)) // star_target - ) - { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); - _res = c; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_149: star_targets '=' -static void * -_tmp_149_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_targets '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - Token * _literal; - expr_ty star_targets_var; - if ( - (star_targets_var = star_targets_rule(p)) // star_targets - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - _res = _PyPegen_dummy_name(p, star_targets_var, _literal); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_targets '='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_150: star_targets '=' -static void * -_tmp_150_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // star_targets '=' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - Token * _literal; - expr_ty star_targets_var; - if ( - (star_targets_var = star_targets_rule(p)) // star_targets - && - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - ) - { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); - _res = _PyPegen_dummy_name(p, star_targets_var, _literal); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop1_151: param_with_default -static asdl_seq * -_loop1_151_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); - NameDefaultPair* param_with_default_var; - while ( - (param_with_default_var = param_with_default_rule(p)) // param_with_default - ) - { - _res = param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_151[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_151_type, _seq); - D(p->level--); - return _seq; -} - -// _loop1_152: lambda_param_with_default -static asdl_seq * -_loop1_152_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void *_res = NULL; - int _mark = p->mark; - int _start_mark = p->mark; - void **_children = PyMem_Malloc(sizeof(void *)); - if (!_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - ssize_t _children_capacity = 1; - ssize_t _n = 0; - { // lambda_param_with_default - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); - NameDefaultPair* lambda_param_with_default_var; - while ( - (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default - ) - { - _res = lambda_param_with_default_var; - if (_n == _children_capacity) { - _children_capacity *= 2; - void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); - if (!_new_children) { - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - _children = _new_children; - } - _children[_n++] = _res; - _mark = p->mark; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); - } - if (_n == 0 || p->error_indicator) { - PyMem_Free(_children); - D(p->level--); - return NULL; - } - asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); - if (!_seq) { - PyMem_Free(_children); - p->error_indicator = 1; - PyErr_NoMemory(); - D(p->level--); - return NULL; - } - for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); - PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); - D(p->level--); - return _seq; -} - -// _tmp_153: ')' | '**' -static void * -_tmp_153_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); - } - { // '**' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - ) - { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _tmp_154: ':' | '**' -static void * -_tmp_154_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ':' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - ) - { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); - } - { // '**' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 35)) // token='**' - ) - { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -void * -_PyPegen_parse(Parser *p) -{ - // Initialize keywords - p->keywords = reserved_keywords; - p->n_keyword_lists = n_keyword_lists; - - // Run parser - void *result = NULL; - if (p->start_rule == Py_file_input) { - result = file_rule(p); - } else if (p->start_rule == Py_single_input) { - result = interactive_rule(p); - } else if (p->start_rule == Py_eval_input) { - result = eval_rule(p); - } else if (p->start_rule == Py_func_type_input) { - result = func_type_rule(p); - } else if (p->start_rule == Py_fstring_input) { - result = fstring_rule(p); - } - - return result; -} - -// The end diff --git a/Parser/pgen/__init__.py b/Parser/pgen/__init__.py deleted file mode 100644 index e69de29bb2d1d..0000000000000 diff --git a/Parser/pgen/__main__.py b/Parser/pgen/__main__.py deleted file mode 100644 index d3780a7b77de8..0000000000000 --- a/Parser/pgen/__main__.py +++ /dev/null @@ -1,43 +0,0 @@ -import argparse - -from .pgen import ParserGenerator - - -def main(): - parser = argparse.ArgumentParser(description="Parser generator main program.") - parser.add_argument( - "grammar", type=str, help="The file with the grammar definition in EBNF format" - ) - parser.add_argument("tokens", type=str, help="The file with the token definitions") - parser.add_argument( - "graminit_h", - type=argparse.FileType("w"), - help="The path to write the grammar's non-terminals as #defines", - ) - parser.add_argument( - "graminit_c", - type=argparse.FileType("w"), - help="The path to write the grammar as initialized data", - ) - - parser.add_argument("--verbose", "-v", action="count") - parser.add_argument( - "--graph", - type=argparse.FileType("w"), - action="store", - metavar="GRAPH_OUTPUT_FILE", - help="Dumps a DOT representation of the generated automata in a file", - ) - - args = parser.parse_args() - - p = ParserGenerator( - args.grammar, args.tokens, verbose=args.verbose, graph_file=args.graph - ) - grammar = p.make_grammar() - grammar.produce_graminit_h(args.graminit_h.write) - grammar.produce_graminit_c(args.graminit_c.write) - - -if __name__ == "__main__": - main() diff --git a/Parser/pgen/automata.py b/Parser/pgen/automata.py deleted file mode 100644 index f2ed221e2852c..0000000000000 --- a/Parser/pgen/automata.py +++ /dev/null @@ -1,400 +0,0 @@ -"""Classes representing state-machine concepts""" - -class NFA: - """A non deterministic finite automata - - A non deterministic automata is a form of a finite state - machine. An NFA's rules are less restrictive than a DFA. - The NFA rules are: - - * A transition can be non-deterministic and can result in - nothing, one, or two or more states. - - * An epsilon transition consuming empty input is valid. - Transitions consuming labeled symbols are also permitted. - - This class assumes that there is only one starting state and one - accepting (ending) state. - - Attributes: - name (str): The name of the rule the NFA is representing. - start (NFAState): The starting state. 
- end (NFAState): The ending state - """ - - def __init__(self, start, end): - self.name = start.rule_name - self.start = start - self.end = end - - def __repr__(self): - return "NFA(start={}, end={})".format(self.start, self.end) - - def dump(self, writer=print): - """Dump a graphical representation of the NFA""" - todo = [self.start] - for i, state in enumerate(todo): - writer(" State", i, state is self.end and "(final)" or "") - for arc in state.arcs: - label = arc.label - next = arc.target - if next in todo: - j = todo.index(next) - else: - j = len(todo) - todo.append(next) - if label is None: - writer(" -> %d" % j) - else: - writer(" %s -> %d" % (label, j)) - - def dump_graph(self, writer): - """Dump a DOT representation of the NFA""" - writer('digraph %s_nfa {\n' % self.name) - todo = [self.start] - for i, state in enumerate(todo): - writer(' %d [label="State %d %s"];\n' % (i, i, state is self.end and "(final)" or "")) - for arc in state.arcs: - label = arc.label - next = arc.target - if next in todo: - j = todo.index(next) - else: - j = len(todo) - todo.append(next) - if label is None: - writer(" %d -> %d [style=dotted label=?];\n" % (i, j)) - else: - writer(" %d -> %d [label=%s];\n" % (i, j, label.replace("'", '"'))) - writer('}\n') - - -class NFAArc: - """An arc representing a transition between two NFA states. - - NFA states can be connected via two ways: - - * A label transition: An input equal to the label must - be consumed to perform the transition. - * An epsilon transition: The transition can be taken without - consuming any input symbol. - - Attributes: - target (NFAState): The ending state of the transition arc. - label (Optional[str]): The label that must be consumed to make - the transition. An epsilon transition is represented - using `None`. - """ - - def __init__(self, target, label): - self.target = target - self.label = label - - def __repr__(self): - return "<%s: %s>" % (self.__class__.__name__, self.label) - - -class NFAState: - """A state of a NFA, non deterministic finite automata. - - Attributes: - target (rule_name): The name of the rule used to represent the NFA's - ending state after a transition. - arcs (Dict[Optional[str], NFAState]): A mapping representing transitions - between the current NFA state and another NFA state via following - a label. - """ - - def __init__(self, rule_name): - self.rule_name = rule_name - self.arcs = [] - - def add_arc(self, target, label=None): - """Add a new arc to connect the state to a target state within the NFA - - The method adds a new arc to the list of arcs available as transitions - from the present state. An optional label indicates a named transition - that consumes an input while the absence of a label represents an epsilon - transition. - - Attributes: - target (NFAState): The end of the transition that the arc represents. - label (Optional[str]): The label that must be consumed for making - the transition. If the label is not provided the transition is assumed - to be an epsilon-transition. 
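        A minimal sketch, for illustration only, of wiring up a toy rule
        item: NAME ['*'] with these classes (the rule name and labels are
        invented for the example):

            start = NFAState("item")
            mid = NFAState("item")
            end = NFAState("item")
            start.add_arc(mid, "NAME")   # labeled arc: consumes a NAME token
            mid.add_arc(end, "'*'")      # labeled arc: consumes a literal '*'
            mid.add_arc(end)             # epsilon arc: the '*' part is optional
            nfa = NFA(start, end)
            nfa.dump()                   # prints the states and their arcs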
- """ - assert label is None or isinstance(label, str) - assert isinstance(target, NFAState) - self.arcs.append(NFAArc(target, label)) - - def __repr__(self): - return "<%s: from %s>" % (self.__class__.__name__, self.rule_name) - - -class DFA: - """A deterministic finite automata - - A deterministic finite automata is a form of a finite state machine - that obeys the following rules: - - * Each of the transitions is uniquely determined by - the source state and input symbol - * Reading an input symbol is required for each state - transition (no epsilon transitions). - - The finite-state machine will accept or reject a string of symbols - and only produces a unique computation of the automaton for each input - string. The DFA must have a unique starting state (represented as the first - element in the list of states) but can have multiple final states. - - Attributes: - name (str): The name of the rule the DFA is representing. - states (List[DFAState]): A collection of DFA states. - """ - - def __init__(self, name, states): - self.name = name - self.states = states - - @classmethod - def from_nfa(cls, nfa): - """Constructs a DFA from a NFA using the Rabin?Scott construction algorithm. - - To simulate the operation of a DFA on a given input string, it's - necessary to keep track of a single state at any time, or more precisely, - the state that the automaton will reach after seeing a prefix of the - input. In contrast, to simulate an NFA, it's necessary to keep track of - a set of states: all of the states that the automaton could reach after - seeing the same prefix of the input, according to the nondeterministic - choices made by the automaton. There are two possible sources of - non-determinism: - - 1) Multiple (one or more) transitions with the same label - - 'A' +-------+ - +----------->+ State +----------->+ - | | 2 | - +-------+ +-------+ - | State | - | 1 | +-------+ - +-------+ | State | - +----------->+ 3 +----------->+ - 'A' +-------+ - - 2) Epsilon transitions (transitions that can be taken without consuming any input) - - +-------+ +-------+ - | State | ? | State | - | 1 +----------->+ 2 +----------->+ - +-------+ +-------+ - - Looking at the first case above, we can't determine which transition should be - followed when given an input A. We could choose whether or not to follow the - transition while in the second case the problem is that we can choose both to - follow the transition or not doing it. To solve this problem we can imagine that - we follow all possibilities at the same time and we construct new states from the - set of all possible reachable states. For every case in the previous example: - - - 1) For multiple transitions with the same label we colapse all of the - final states under the same one - - +-------+ +-------+ - | State | 'A' | State | - | 1 +----------->+ 2-3 +----------->+ - +-------+ +-------+ - - 2) For epsilon transitions we collapse all epsilon-reachable states - into the same one - - +-------+ - | State | - | 1-2 +-----------> - +-------+ - - Because the DFA states consist of sets of NFA states, an n-state NFA - may be converted to a DFA with at most 2**n states. Notice that the - constructed DFA is not minimal and can be simplified or reduced - afterwards. - - Parameters: - name (NFA): The NFA to transform to DFA. 
- """ - assert isinstance(nfa, NFA) - - def add_closure(nfa_state, base_nfa_set): - """Calculate the epsilon-closure of a given state - - Add to the *base_nfa_set* all the states that are - reachable from *nfa_state* via epsilon-transitions. - """ - assert isinstance(nfa_state, NFAState) - if nfa_state in base_nfa_set: - return - base_nfa_set.add(nfa_state) - for nfa_arc in nfa_state.arcs: - if nfa_arc.label is None: - add_closure(nfa_arc.target, base_nfa_set) - - # Calculate the epsilon-closure of the starting state - base_nfa_set = set() - add_closure(nfa.start, base_nfa_set) - - # Start by visiting the NFA starting state (there is only one). - states = [DFAState(nfa.name, base_nfa_set, nfa.end)] - - for state in states: # NB states grow while we're iterating - - # Find transitions from the current state to other reachable states - # and store them in mapping that correlates the label to all the - # possible reachable states that can be obtained by consuming a - # token equal to the label. Each set of all the states that can - # be reached after following a label will be the a DFA state. - arcs = {} - for nfa_state in state.nfa_set: - for nfa_arc in nfa_state.arcs: - if nfa_arc.label is not None: - nfa_set = arcs.setdefault(nfa_arc.label, set()) - # All states that can be reached by epsilon-transitions - # are also included in the set of reachable states. - add_closure(nfa_arc.target, nfa_set) - - # Now create new DFAs by visiting all posible transitions between - # the current DFA state and the new power-set states (each nfa_set) - # via the different labels. As the nodes are appended to *states* this - # is performing a breadth-first search traversal over the power-set of - # the states of the original NFA. - for label, nfa_set in sorted(arcs.items()): - for exisisting_state in states: - if exisisting_state.nfa_set == nfa_set: - # The DFA state already exists for this rule. - next_state = exisisting_state - break - else: - next_state = DFAState(nfa.name, nfa_set, nfa.end) - states.append(next_state) - - # Add a transition between the current DFA state and the new - # DFA state (the power-set state) via the current label. - state.add_arc(next_state, label) - - return cls(nfa.name, states) - - def __iter__(self): - return iter(self.states) - - def simplify(self): - """Attempt to reduce the number of states of the DFA - - Transform the DFA into an equivalent DFA that has fewer states. Two - classes of states can be removed or merged from the original DFA without - affecting the language it accepts to minimize it: - - * Unreachable states can not be reached from the initial - state of the DFA, for any input string. - * Nondistinguishable states are those that cannot be distinguished - from one another for any input string. - - This algorithm does not achieve the optimal fully-reduced solution, but it - works well enough for the particularities of the Python grammar. The - algorithm repeatedly looks for two states that have the same set of - arcs (same labels pointing to the same nodes) and unifies them, until - things stop changing. 
- """ - changes = True - while changes: - changes = False - for i, state_i in enumerate(self.states): - for j in range(i + 1, len(self.states)): - state_j = self.states[j] - if state_i == state_j: - del self.states[j] - for state in self.states: - state.unifystate(state_j, state_i) - changes = True - break - - def dump(self, writer=print): - """Dump a graphical representation of the DFA""" - for i, state in enumerate(self.states): - writer(" State", i, state.is_final and "(final)" or "") - for label, next in sorted(state.arcs.items()): - writer(" %s -> %d" % (label, self.states.index(next))) - - def dump_graph(self, writer): - """Dump a DOT representation of the DFA""" - writer('digraph %s_dfa {\n' % self.name) - for i, state in enumerate(self.states): - writer(' %d [label="State %d %s"];\n' % (i, i, state.is_final and "(final)" or "")) - for label, next in sorted(state.arcs.items()): - writer(" %d -> %d [label=%s];\n" % (i, self.states.index(next), label.replace("'", '"'))) - writer('}\n') - - -class DFAState(object): - """A state of a DFA - - Attributes: - rule_name (rule_name): The name of the DFA rule containing the represented state. - nfa_set (Set[NFAState]): The set of NFA states used to create this state. - final (bool): True if the state represents an accepting state of the DFA - containing this state. - arcs (Dict[label, DFAState]): A mapping representing transitions between - the current DFA state and another DFA state via following a label. - """ - - def __init__(self, rule_name, nfa_set, final): - assert isinstance(nfa_set, set) - assert isinstance(next(iter(nfa_set)), NFAState) - assert isinstance(final, NFAState) - self.rule_name = rule_name - self.nfa_set = nfa_set - self.arcs = {} # map from terminals/nonterminals to DFAState - self.is_final = final in nfa_set - - def add_arc(self, target, label): - """Add a new arc to the current state. - - Parameters: - target (DFAState): The DFA state at the end of the arc. - label (str): The label respresenting the token that must be consumed - to perform this transition. - """ - assert isinstance(label, str) - assert label not in self.arcs - assert isinstance(target, DFAState) - self.arcs[label] = target - - def unifystate(self, old, new): - """Replace all arcs from the current node to *old* with *new*. - - Parameters: - old (DFAState): The DFA state to remove from all existing arcs. - new (DFAState): The DFA state to replace in all existing arcs. - """ - for label, next_ in self.arcs.items(): - if next_ is old: - self.arcs[label] = new - - def __eq__(self, other): - # The nfa_set does not matter for equality - assert isinstance(other, DFAState) - if self.is_final != other.is_final: - return False - # We cannot just return self.arcs == other.arcs because that - # would invoke this method recursively if there are any cycles. - if len(self.arcs) != len(other.arcs): - return False - for label, next_ in self.arcs.items(): - if next_ is not other.arcs.get(label): - return False - return True - - __hash__ = None # For Py3 compatibility. - - def __repr__(self): - return "<%s: %s is_final=%s>" % ( - self.__class__.__name__, - self.rule_name, - self.is_final, - ) diff --git a/Parser/pgen/grammar.py b/Parser/pgen/grammar.py deleted file mode 100644 index ce40e160ca886..0000000000000 --- a/Parser/pgen/grammar.py +++ /dev/null @@ -1,147 +0,0 @@ -import collections - - -class Grammar: - """Pgen parsing tables class. - - The instance variables are as follows: - - symbol2number -- a dict mapping symbol names to numbers. 
Symbol - numbers are always 256 or higher, to distinguish - them from token numbers, which are between 0 and - 255 (inclusive). - - number2symbol -- a dict mapping numbers to symbol names; - these two are each other's inverse. - - states -- a list of DFAs, where each DFA is a list of - states, each state is a list of arcs, and each - arc is a (i, j) pair where i is a label and j is - a state number. The DFA number is the index into - this list. (This name is slightly confusing.) - Final states are represented by a special arc of - the form (0, j) where j is its own state number. - - dfas -- a dict mapping symbol numbers to (DFA, first) - pairs, where DFA is an item from the states list - above, and first is a set of tokens that can - begin this grammar rule. - - labels -- a list of (x, y) pairs where x is either a token - number or a symbol number, and y is either None - or a string; the strings are keywords. The label - number is the index in this list; label numbers - are used to mark state transitions (arcs) in the - DFAs. - - start -- the number of the grammar's start symbol. - - keywords -- a dict mapping keyword strings to arc labels. - - tokens -- a dict mapping token numbers to arc labels. - - """ - - def __init__(self): - self.symbol2number = collections.OrderedDict() - self.number2symbol = collections.OrderedDict() - self.states = [] - self.dfas = collections.OrderedDict() - self.labels = [(0, "EMPTY")] - self.keywords = collections.OrderedDict() - self.tokens = collections.OrderedDict() - self.symbol2label = collections.OrderedDict() - self.start = 256 - - def produce_graminit_h(self, writer): - writer("/* Generated by Parser/pgen */\n\n") - for number, symbol in self.number2symbol.items(): - writer("#define {} {}\n".format(symbol, number)) - - def produce_graminit_c(self, writer): - writer("/* Generated by Parser/pgen */\n\n") - - writer('#include "exports.h"\n') - writer('#include "grammar.h"\n') - writer("Py_EXPORTED_SYMBOL grammar _PyParser_Grammar;\n") - - self.print_dfas(writer) - self.print_labels(writer) - - writer("Py_EXPORTED_SYMBOL grammar _PyParser_Grammar = {\n") - writer(" {n_dfas},\n".format(n_dfas=len(self.dfas))) - writer(" dfas,\n") - writer(" {{{n_labels}, labels}},\n".format(n_labels=len(self.labels))) - writer(" {start_number}\n".format(start_number=self.start)) - writer("};\n") - - def print_labels(self, writer): - writer( - "static const label labels[{n_labels}] = {{\n".format( - n_labels=len(self.labels) - ) - ) - for label, name in self.labels: - label_name = '"{}"'.format(name) if name is not None else 0 - writer( - " {{{label}, {label_name}}},\n".format( - label=label, label_name=label_name - ) - ) - writer("};\n") - - def print_dfas(self, writer): - self.print_states(writer) - writer("static const dfa dfas[{}] = {{\n".format(len(self.dfas))) - for dfaindex, dfa_elem in enumerate(self.dfas.items()): - symbol, (dfa, first_sets) = dfa_elem - writer( - ' {{{dfa_symbol}, "{symbol_name}", '.format( - dfa_symbol=symbol, symbol_name=self.number2symbol[symbol] - ) - + "{n_states}, states_{dfa_index},\n".format( - n_states=len(dfa), dfa_index=dfaindex - ) - + ' "' - ) - - bitset = bytearray((len(self.labels) >> 3) + 1) - for token in first_sets: - bitset[token >> 3] |= 1 << (token & 7) - for byte in bitset: - writer("\\%03o" % (byte & 0xFF)) - writer('"},\n') - writer("};\n") - - def print_states(self, write): - for dfaindex, dfa in enumerate(self.states): - self.print_arcs(write, dfaindex, dfa) - write( - "static state states_{dfa_index}[{n_states}] = 
{{\n".format( - dfa_index=dfaindex, n_states=len(dfa) - ) - ) - for stateindex, state in enumerate(dfa): - narcs = len(state) - write( - " {{{n_arcs}, arcs_{dfa_index}_{state_index}}},\n".format( - n_arcs=narcs, dfa_index=dfaindex, state_index=stateindex - ) - ) - write("};\n") - - def print_arcs(self, write, dfaindex, states): - for stateindex, state in enumerate(states): - narcs = len(state) - write( - "static const arc arcs_{dfa_index}_{state_index}[{n_arcs}] = {{\n".format( - dfa_index=dfaindex, state_index=stateindex, n_arcs=narcs - ) - ) - for a, b in state: - write( - " {{{from_label}, {to_state}}},\n".format( - from_label=a, to_state=b - ) - ) - write("};\n") diff --git a/Parser/pgen/keywordgen.py b/Parser/pgen/keywordgen.py deleted file mode 100644 index f0234a81b62da..0000000000000 --- a/Parser/pgen/keywordgen.py +++ /dev/null @@ -1,59 +0,0 @@ -"""Generate Lib/keyword.py from the Grammar and Tokens files using pgen""" - -import argparse - -from .pgen import ParserGenerator - -TEMPLATE = r''' -"""Keywords (from "Grammar/Grammar") - -This file is automatically generated; please don't muck it up! - -To update the symbols in this file, 'cd' to the top directory of -the python source tree and run: - - python3 -m Parser.pgen.keywordgen Grammar/Grammar \ - Grammar/Tokens \ - Lib/keyword.py - -Alternatively, you can run 'make regen-keyword'. -""" - -__all__ = ["iskeyword", "kwlist"] - -kwlist = [ - {keywords} -] - -iskeyword = frozenset(kwlist).__contains__ -'''.lstrip() - -EXTRA_KEYWORDS = ["async", "await"] - - -def main(): - parser = argparse.ArgumentParser( - description="Generate the Lib/keywords.py " "file from the grammar." - ) - parser.add_argument( - "grammar", type=str, help="The file with the grammar definition in EBNF format" - ) - parser.add_argument("tokens", type=str, help="The file with the token definitions") - parser.add_argument( - "keyword_file", - type=argparse.FileType("w"), - help="The path to write the keyword definitions", - ) - args = parser.parse_args() - p = ParserGenerator(args.grammar, args.tokens) - grammar = p.make_grammar() - - with args.keyword_file as thefile: - all_keywords = sorted(list(grammar.keywords) + EXTRA_KEYWORDS) - - keywords = ",\n ".join(map(repr, all_keywords)) - thefile.write(TEMPLATE.format(keywords=keywords)) - - -if __name__ == "__main__": - main() diff --git a/Parser/pgen/metaparser.py b/Parser/pgen/metaparser.py deleted file mode 100644 index 074a083fb74b8..0000000000000 --- a/Parser/pgen/metaparser.py +++ /dev/null @@ -1,152 +0,0 @@ -"""Parser for the Python metagrammar""" - -import io -import tokenize # from stdlib - -from .automata import NFA, NFAState - - -class GrammarParser: - """Parser for Python grammar files.""" - - _translation_table = { - tokenize.NAME: "NAME", - tokenize.STRING: "STRING", - tokenize.NEWLINE: "NEWLINE", - tokenize.NL: "NL", - tokenize.OP: "OP", - tokenize.ENDMARKER: "ENDMARKER", - tokenize.COMMENT: "COMMENT", - } - - def __init__(self, grammar): - self.grammar = grammar - grammar_adaptor = io.StringIO(grammar) - self.generator = tokenize.generate_tokens(grammar_adaptor.readline) - self._gettoken() # Initialize lookahead - self._current_rule_name = None - - def parse(self): - """Turn the grammar into a collection of NFAs""" - # grammar: (NEWLINE | rule)* ENDMARKER - while self.type != tokenize.ENDMARKER: - while self.type == tokenize.NEWLINE: - self._gettoken() - # rule: NAME ':' rhs NEWLINE - self._current_rule_name = self._expect(tokenize.NAME) - self._expect(tokenize.OP, ":") - a, z = self._parse_rhs() - 
self._expect(tokenize.NEWLINE) - - yield NFA(a, z) - - def _parse_rhs(self): - # rhs: items ('|' items)* - a, z = self._parse_items() - if self.value != "|": - return a, z - else: - aa = NFAState(self._current_rule_name) - zz = NFAState(self._current_rule_name) - while True: - # Allow to transit directly to the previous state and connect the end of the - # previous state to the end of the current one, effectively allowing to skip - # the current state. - aa.add_arc(a) - z.add_arc(zz) - if self.value != "|": - break - - self._gettoken() - a, z = self._parse_items() - return aa, zz - - def _parse_items(self): - # items: item+ - a, b = self._parse_item() - while self.type in (tokenize.NAME, tokenize.STRING) or self.value in ("(", "["): - c, d = self._parse_item() - # Allow a transition between the end of the previous state - # and the beginning of the new one, connecting all the items - # together. In this way we can only reach the end if we visit - # all the items. - b.add_arc(c) - b = d - return a, b - - def _parse_item(self): - # item: '[' rhs ']' | atom ['+' | '*'] - if self.value == "[": - self._gettoken() - a, z = self._parse_rhs() - self._expect(tokenize.OP, "]") - # Make a transition from the beginning to the end so it is possible to - # advance for free to the next state of this item # without consuming - # anything from the rhs. - a.add_arc(z) - return a, z - else: - a, z = self._parse_atom() - value = self.value - if value not in ("+", "*"): - return a, z - self._gettoken() - z.add_arc(a) - if value == "+": - # Create a cycle to the beginning so we go back to the old state in this - # item and repeat. - return a, z - else: - # The end state is the same as the beginning, so we can cycle arbitrarily - # and end in the beginning if necessary. - return a, a - - def _parse_atom(self): - # atom: '(' rhs ')' | NAME | STRING - if self.value == "(": - self._gettoken() - a, z = self._parse_rhs() - self._expect(tokenize.OP, ")") - return a, z - elif self.type in (tokenize.NAME, tokenize.STRING): - a = NFAState(self._current_rule_name) - z = NFAState(self._current_rule_name) - # We can transit to the next state only if we consume the value. - a.add_arc(z, self.value) - self._gettoken() - return a, z - else: - self._raise_error( - "expected (...) 
or NAME or STRING, got {} ({})", - self._translation_table.get(self.type, self.type), - self.value, - ) - - def _expect(self, type_, value=None): - if self.type != type_: - self._raise_error( - "expected {}, got {} ({})", - self._translation_table.get(type_, type_), - self._translation_table.get(self.type, self.type), - self.value, - ) - if value is not None and self.value != value: - self._raise_error("expected {}, got {}", value, self.value) - value = self.value - self._gettoken() - return value - - def _gettoken(self): - tup = next(self.generator) - while tup[0] in (tokenize.COMMENT, tokenize.NL): - tup = next(self.generator) - self.type, self.value, self.begin, self.end, self.line = tup - - def _raise_error(self, msg, *args): - if args: - try: - msg = msg.format(*args) - except Exception: - msg = " ".join([msg] + list(map(str, args))) - line = self.grammar.splitlines()[self.begin[0] - 1] - raise SyntaxError(msg, ("", self.begin[0], self.begin[1], line)) diff --git a/Parser/pgen/pgen.py b/Parser/pgen/pgen.py deleted file mode 100644 index 03032d4ed8ccf..0000000000000 --- a/Parser/pgen/pgen.py +++ /dev/null @@ -1,310 +0,0 @@ -"""Python parser generator - - -This parser generator transforms a Python grammar file into parsing tables -that can be consumed by Python's LL(1) parser written in C. - -Concepts --------- - -* An LL(1) parser (Left-to-right, Leftmost derivation, 1 token-lookahead) is a - top-down parser for a subset of context-free languages. It parses the input - from Left to right, performing Leftmost derivation of the sentence, and can - only use 1 token of lookahead when parsing a sentence. - -* A parsing table is a collection of data that a generic implementation of the - LL(1) parser consumes to know how to parse a given context-free grammar. In - this case the collection of data involves Deterministic Finite Automatons, - calculated first sets, keywords and transition labels. - -* A grammar is defined by production rules (or just 'productions') that specify - which symbols may replace which other symbols; these rules may be used to - generate strings, or to parse them. Each such rule has a head, or left-hand - side, which consists of the string that may be replaced, and a body, or - right-hand side, which consists of a string that may replace it. In the - Python grammar, rules are written in the form - - rule_name: rule_description; - - meaning the rule 'a: b' specifies that a can be replaced by b. A context-free - grammar is a grammar in which the left-hand side of each production rule - consists of only a single nonterminal symbol. Context-free grammars can - always be recognized by a Non-Deterministic Automatons. - -* Terminal symbols are literal symbols which may appear in the outputs of the - production rules of the grammar and which cannot be changed using the rules - of the grammar. Applying the rules recursively to a source string of symbols - will usually terminate in a final output string consisting only of terminal - symbols. - -* Nonterminal symbols are those symbols which can be replaced. The grammar - includes a start symbol a designated member of the set of nonterminals from - which all the strings in the language may be derived by successive - applications of the production rules. - -* The language defined by the grammar is defined as the set of terminal strings - that can be derived using the production rules. 
- -* The first sets of a rule (FIRST(rule)) are defined to be the set of terminals - that can appear in the first position of any string derived from the rule. - This is useful for LL(1) parsers as the parser is only allowed to look at the - next token in the input to know which rule needs to parse. For example, given - this grammar: - - start: '(' A | B ')' - A: 'a' '<' - B: 'b' '<' - - and the input '(b<)' the parser can only look at 'b' to know if it needs - to parse A o B. Because FIRST(A) = {'a'} and FIRST(B) = {'b'} it knows - that needs to continue parsing rule B because only that rule can start - with 'b'. - -Description ------------ - -The input for the parser generator is a grammar in extended BNF form (using * -for repetition, + for at-least-once repetition, [] for optional parts, | for -alternatives and () for grouping). - -Each rule in the grammar file is considered as a regular expression in its -own right. It is turned into a Non-deterministic Finite Automaton (NFA), -which is then turned into a Deterministic Finite Automaton (DFA), which is -then optimized to reduce the number of states. See [Aho&Ullman 77] chapter 3, -or similar compiler books (this technique is more often used for lexical -analyzers). - -The DFA's are used by the parser as parsing tables in a special way that's -probably unique. Before they are usable, the FIRST sets of all non-terminals -are computed so the LL(1) parser consuming the parsing tables can distinguish -between different transitions. -Reference ---------- - -[Aho&Ullman 77] - Aho&Ullman, Principles of Compiler Design, Addison-Wesley 1977 - (first edition) -""" - -from ast import literal_eval -import collections - -from . import grammar, token -from .automata import DFA -from .metaparser import GrammarParser - -import enum - - -class LabelType(enum.Enum): - NONTERMINAL = 0 - NAMED_TOKEN = 1 - KEYWORD = 2 - OPERATOR = 3 - NONE = 4 - - -class Label(str): - def __init__(self, value): - self.type = self._get_type() - - def _get_type(self): - if self[0].isalpha(): - if self.upper() == self: - # NAMED tokens (ASYNC, NAME...) are all uppercase by convention - return LabelType.NAMED_TOKEN - else: - # If is not uppercase it must be a non terminal. 
- return LabelType.NONTERMINAL - else: - # Keywords and operators are wrapped in quotes - assert self[0] == self[-1] in ('"', "'"), self - value = literal_eval(self) - if value[0].isalpha(): - return LabelType.KEYWORD - else: - return LabelType.OPERATOR - - def __repr__(self): - return "{}({})".format(self.type, super().__repr__()) - - -class ParserGenerator(object): - def __init__(self, grammar_file, token_file, verbose=False, graph_file=None): - with open(grammar_file) as f: - self.grammar = f.read() - with open(token_file) as tok_file: - token_lines = tok_file.readlines() - self.tokens = dict(token.generate_tokens(token_lines)) - self.opmap = dict(token.generate_opmap(token_lines)) - # Manually add <> so it does not collide with != - self.opmap["<>"] = "NOTEQUAL" - self.verbose = verbose - self.filename = grammar_file - self.graph_file = graph_file - self.dfas, self.startsymbol = self.create_dfas() - self.first = {} # map from symbol name to set of tokens - self.calculate_first_sets() - - def create_dfas(self): - rule_to_dfas = collections.OrderedDict() - start_nonterminal = None - for nfa in GrammarParser(self.grammar).parse(): - if self.verbose: - print("Dump of NFA for", nfa.name) - nfa.dump() - if self.graph_file is not None: - nfa.dump_graph(self.graph_file.write) - dfa = DFA.from_nfa(nfa) - if self.verbose: - print("Dump of DFA for", dfa.name) - dfa.dump() - dfa.simplify() - if self.graph_file is not None: - dfa.dump_graph(self.graph_file.write) - rule_to_dfas[dfa.name] = dfa - - if start_nonterminal is None: - start_nonterminal = dfa.name - - return rule_to_dfas, start_nonterminal - - def make_grammar(self): - c = grammar.Grammar() - c.all_labels = set() - names = list(self.dfas.keys()) - names.remove(self.startsymbol) - names.insert(0, self.startsymbol) - for name in names: - i = 256 + len(c.symbol2number) - c.symbol2number[Label(name)] = i - c.number2symbol[i] = Label(name) - c.all_labels.add(name) - for name in names: - self.make_label(c, name) - dfa = self.dfas[name] - states = [] - for state in dfa: - arcs = [] - for label, next in sorted(state.arcs.items()): - c.all_labels.add(label) - arcs.append((self.make_label(c, label), dfa.states.index(next))) - if state.is_final: - arcs.append((0, dfa.states.index(state))) - states.append(arcs) - c.states.append(states) - c.dfas[c.symbol2number[name]] = (states, self.make_first_sets(c, name)) - c.start = c.symbol2number[self.startsymbol] - - if self.verbose: - print("") - print("Grammar summary") - print("===============") - - print("- {n_labels} labels".format(n_labels=len(c.labels))) - print("- {n_dfas} dfas".format(n_dfas=len(c.dfas))) - print("- {n_tokens} tokens".format(n_tokens=len(c.tokens))) - print("- {n_keywords} keywords".format(n_keywords=len(c.keywords))) - print( - "- Start symbol: {start_symbol}".format( - start_symbol=c.number2symbol[c.start] - ) - ) - return c - - def make_first_sets(self, c, name): - rawfirst = self.first[name] - first = set() - for label in sorted(rawfirst): - ilabel = self.make_label(c, label) - ##assert ilabel not in first # XXX failed on <> ... 
!= - first.add(ilabel) - return first - - def make_label(self, c, label): - label = Label(label) - ilabel = len(c.labels) - - if label.type == LabelType.NONTERMINAL: - if label in c.symbol2label: - return c.symbol2label[label] - else: - c.labels.append((c.symbol2number[label], None)) - c.symbol2label[label] = ilabel - return ilabel - elif label.type == LabelType.NAMED_TOKEN: - # A named token (NAME, NUMBER, STRING) - itoken = self.tokens.get(label, None) - assert isinstance(itoken, int), label - assert itoken in self.tokens.values(), label - if itoken in c.tokens: - return c.tokens[itoken] - else: - c.labels.append((itoken, None)) - c.tokens[itoken] = ilabel - return ilabel - elif label.type == LabelType.KEYWORD: - # A keyword - value = literal_eval(label) - if value in c.keywords: - return c.keywords[value] - else: - c.labels.append((self.tokens["NAME"], value)) - c.keywords[value] = ilabel - return ilabel - elif label.type == LabelType.OPERATOR: - # An operator (any non-numeric token) - value = literal_eval(label) - tok_name = self.opmap[value] # Fails if unknown token - itoken = self.tokens[tok_name] - if itoken in c.tokens: - return c.tokens[itoken] - else: - c.labels.append((itoken, None)) - c.tokens[itoken] = ilabel - return ilabel - else: - raise ValueError("Cannot categorize label {}".format(label)) - - def calculate_first_sets(self): - names = list(self.dfas.keys()) - for name in names: - if name not in self.first: - self.calculate_first_sets_for_rule(name) - - if self.verbose: - print("First set for {dfa_name}".format(dfa_name=name)) - for item in self.first[name]: - print(" - {terminal}".format(terminal=item)) - - def calculate_first_sets_for_rule(self, name): - dfa = self.dfas[name] - self.first[name] = None # dummy to detect left recursion - state = dfa.states[0] - totalset = set() - overlapcheck = {} - for label, next in state.arcs.items(): - if label in self.dfas: - if label in self.first: - fset = self.first[label] - if fset is None: - raise ValueError("recursion for rule %r" % name) - else: - self.calculate_first_sets_for_rule(label) - fset = self.first[label] - totalset.update(fset) - overlapcheck[label] = fset - else: - totalset.add(label) - overlapcheck[label] = {label} - inverse = {} - for label, itsfirst in overlapcheck.items(): - for symbol in itsfirst: - if symbol in inverse: - raise ValueError( - "rule %s is ambiguous; %s is in the" - " first sets of %s as well as %s" - % (name, symbol, label, inverse[symbol]) - ) - inverse[symbol] = label - self.first[name] = totalset diff --git a/Parser/pgen/token.py b/Parser/pgen/token.py deleted file mode 100644 index 2cff62ce3b23e..0000000000000 --- a/Parser/pgen/token.py +++ /dev/null @@ -1,38 +0,0 @@ -import itertools - - -def generate_tokens(tokens): - numbers = itertools.count(0) - for line in tokens: - line = line.strip() - - if not line or line.startswith("#"): - continue - - name = line.split()[0] - yield (name, next(numbers)) - - yield ("N_TOKENS", next(numbers)) - yield ("NT_OFFSET", 256) - - -def generate_opmap(tokens): - for line in tokens: - line = line.strip() - - if not line or line.startswith("#"): - continue - - pieces = line.split() - - if len(pieces) != 2: - continue - - name, op = pieces - yield (op.strip("'"), name) - - # Yield independently <>. This is needed so it does not collide - # with the token generation in "generate_tokens" because if this - # symbol is included in Grammar/Tokens, it will collide with != - # as it has the same name (NOTEQUAL). 
- yield ("<>", "NOTEQUAL") diff --git a/Parser/pegen/parse_string.c b/Parser/string_parser.c similarity index 99% rename from Parser/pegen/parse_string.c rename to Parser/string_parser.c index 94241e1965e9a..cb2332bad0e9d 100644 --- a/Parser/pegen/parse_string.c +++ b/Parser/string_parser.c @@ -1,8 +1,8 @@ #include -#include "../tokenizer.h" +#include "tokenizer.h" #include "pegen.h" -#include "parse_string.h" +#include "string_parser.h" //// STRING HANDLING FUNCTIONS //// diff --git a/Parser/pegen/parse_string.h b/Parser/string_parser.h similarity index 100% rename from Parser/pegen/parse_string.h rename to Parser/string_parser.h diff --git a/Programs/_testembed.c b/Programs/_testembed.c index d89f6be6570e3..6f38b6247fb89 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -486,7 +486,6 @@ static int test_init_from_config(void) config.install_signal_handlers = 0; putenv("PYTHONOLDPARSER=1"); - config._use_peg_parser = 0; /* FIXME: test use_environment */ diff --git a/Python/ast.c b/Python/ast.c index 408591f32536f..d7feb8ce852fd 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -550,5295 +550,6 @@ PyAST_Validate(mod_ty mod) return res; } -/* This is done here, so defines like "test" don't interfere with AST use above. */ -#include "grammar.h" -#include "parsetok.h" -#include "graminit.h" - -/* Data structure used internally */ -struct compiling { - PyArena *c_arena; /* Arena for allocating memory. */ - PyObject *c_filename; /* filename */ - PyObject *c_normalize; /* Normalization function from unicodedata. */ - int c_feature_version; /* Latest minor version of Python for allowed features */ -}; - -static asdl_seq *seq_for_testlist(struct compiling *, const node *); -static expr_ty ast_for_expr(struct compiling *, const node *); -static stmt_ty ast_for_stmt(struct compiling *, const node *); -static asdl_seq *ast_for_suite(struct compiling *c, const node *n); -static asdl_seq *ast_for_exprlist(struct compiling *, const node *, - expr_context_ty); -static expr_ty ast_for_testlist(struct compiling *, const node *); -static stmt_ty ast_for_classdef(struct compiling *, const node *, asdl_seq *); - -static stmt_ty ast_for_with_stmt(struct compiling *, const node *, bool); -static stmt_ty ast_for_for_stmt(struct compiling *, const node *, bool); - -/* Note different signature for ast_for_call */ -static expr_ty ast_for_call(struct compiling *, const node *, expr_ty, - const node *, const node *, const node *); - -static PyObject *parsenumber(struct compiling *, const char *); -static expr_ty parsestrplus(struct compiling *, const node *n); -static void get_last_end_pos(asdl_seq *, int *, int *); - -#define COMP_GENEXP 0 -#define COMP_LISTCOMP 1 -#define COMP_SETCOMP 2 - -static int -init_normalization(struct compiling *c) -{ - PyObject *m = PyImport_ImportModuleNoBlock("unicodedata"); - if (!m) - return 0; - c->c_normalize = PyObject_GetAttrString(m, "normalize"); - Py_DECREF(m); - if (!c->c_normalize) - return 0; - return 1; -} - -static identifier -new_identifier(const char *n, struct compiling *c) -{ - PyObject *id = PyUnicode_DecodeUTF8(n, strlen(n), NULL); - if (!id) - return NULL; - /* PyUnicode_DecodeUTF8 should always return a ready string. */ - assert(PyUnicode_IS_READY(id)); - /* Check whether there are non-ASCII characters in the - identifier; if so, normalize to NFKC. 
*/ - if (!PyUnicode_IS_ASCII(id)) { - PyObject *id2; - if (!c->c_normalize && !init_normalization(c)) { - Py_DECREF(id); - return NULL; - } - PyObject *form = PyUnicode_InternFromString("NFKC"); - if (form == NULL) { - Py_DECREF(id); - return NULL; - } - PyObject *args[2] = {form, id}; - id2 = _PyObject_FastCall(c->c_normalize, args, 2); - Py_DECREF(id); - Py_DECREF(form); - if (!id2) - return NULL; - if (!PyUnicode_Check(id2)) { - PyErr_Format(PyExc_TypeError, - "unicodedata.normalize() must return a string, not " - "%.200s", - _PyType_Name(Py_TYPE(id2))); - Py_DECREF(id2); - return NULL; - } - id = id2; - } - PyUnicode_InternInPlace(&id); - if (PyArena_AddPyObject(c->c_arena, id) < 0) { - Py_DECREF(id); - return NULL; - } - return id; -} - -#define NEW_IDENTIFIER(n) new_identifier(STR(n), c) - -static int -ast_error(struct compiling *c, const node *n, const char *errmsg, ...) -{ - PyObject *value, *errstr, *loc, *tmp; - va_list va; - - va_start(va, errmsg); - errstr = PyUnicode_FromFormatV(errmsg, va); - va_end(va); - if (!errstr) { - return 0; - } - loc = PyErr_ProgramTextObject(c->c_filename, LINENO(n)); - if (!loc) { - Py_INCREF(Py_None); - loc = Py_None; - } - tmp = Py_BuildValue("(OiiN)", c->c_filename, LINENO(n), n->n_col_offset + 1, loc); - if (!tmp) { - Py_DECREF(errstr); - return 0; - } - value = PyTuple_Pack(2, errstr, tmp); - Py_DECREF(errstr); - Py_DECREF(tmp); - if (value) { - PyErr_SetObject(PyExc_SyntaxError, value); - Py_DECREF(value); - } - return 0; -} - -/* num_stmts() returns number of contained statements. - - Use this routine to determine how big a sequence is needed for - the statements in a parse tree. Its raison d'etre is this bit of - grammar: - - stmt: simple_stmt | compound_stmt - simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE - - A simple_stmt can contain multiple small_stmt elements joined - by semicolons. If the arg is a simple_stmt, the number of - small_stmt elements is returned. 
-*/ - -static string -new_type_comment(const char *s, struct compiling *c) -{ - PyObject *res = PyUnicode_DecodeUTF8(s, strlen(s), NULL); - if (res == NULL) - return NULL; - if (PyArena_AddPyObject(c->c_arena, res) < 0) { - Py_DECREF(res); - return NULL; - } - return res; -} -#define NEW_TYPE_COMMENT(n) new_type_comment(STR(n), c) - -static int -num_stmts(const node *n) -{ - int i, l; - node *ch; - - switch (TYPE(n)) { - case single_input: - if (TYPE(CHILD(n, 0)) == NEWLINE) - return 0; - else - return num_stmts(CHILD(n, 0)); - case file_input: - l = 0; - for (i = 0; i < NCH(n); i++) { - ch = CHILD(n, i); - if (TYPE(ch) == stmt) - l += num_stmts(ch); - } - return l; - case stmt: - return num_stmts(CHILD(n, 0)); - case compound_stmt: - return 1; - case simple_stmt: - return NCH(n) / 2; /* Divide by 2 to remove count of semi-colons */ - case suite: - case func_body_suite: - /* func_body_suite: simple_stmt | NEWLINE [TYPE_COMMENT NEWLINE] INDENT stmt+ DEDENT */ - /* suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT */ - if (NCH(n) == 1) - return num_stmts(CHILD(n, 0)); - else { - i = 2; - l = 0; - if (TYPE(CHILD(n, 1)) == TYPE_COMMENT) - i += 2; - for (; i < (NCH(n) - 1); i++) - l += num_stmts(CHILD(n, i)); - return l; - } - default: { - _Py_FatalErrorFormat(__func__, "Non-statement found: %d %d", - TYPE(n), NCH(n)); - } - } - Py_UNREACHABLE(); -} - -/* Transform the CST rooted at node * to the appropriate AST -*/ - -mod_ty -PyAST_FromNodeObject(const node *n, PyCompilerFlags *flags, - PyObject *filename, PyArena *arena) -{ - int i, j, k, num; - asdl_seq *stmts = NULL; - asdl_seq *type_ignores = NULL; - stmt_ty s; - node *ch; - struct compiling c; - mod_ty res = NULL; - asdl_seq *argtypes = NULL; - expr_ty ret, arg; - - c.c_arena = arena; - /* borrowed reference */ - c.c_filename = filename; - c.c_normalize = NULL; - c.c_feature_version = flags ? flags->cf_feature_version : PY_MINOR_VERSION; - - if (TYPE(n) == encoding_decl) - n = CHILD(n, 0); - - k = 0; - switch (TYPE(n)) { - case file_input: - stmts = _Py_asdl_seq_new(num_stmts(n), arena); - if (!stmts) - goto out; - for (i = 0; i < NCH(n) - 1; i++) { - ch = CHILD(n, i); - if (TYPE(ch) == NEWLINE) - continue; - REQ(ch, stmt); - num = num_stmts(ch); - if (num == 1) { - s = ast_for_stmt(&c, ch); - if (!s) - goto out; - asdl_seq_SET(stmts, k++, s); - } - else { - ch = CHILD(ch, 0); - REQ(ch, simple_stmt); - for (j = 0; j < num; j++) { - s = ast_for_stmt(&c, CHILD(ch, j * 2)); - if (!s) - goto out; - asdl_seq_SET(stmts, k++, s); - } - } - } - - /* Type ignores are stored under the ENDMARKER in file_input. */ - ch = CHILD(n, NCH(n) - 1); - REQ(ch, ENDMARKER); - num = NCH(ch); - type_ignores = _Py_asdl_seq_new(num, arena); - if (!type_ignores) - goto out; - - for (i = 0; i < num; i++) { - string type_comment = new_type_comment(STR(CHILD(ch, i)), &c); - if (!type_comment) - goto out; - type_ignore_ty ti = TypeIgnore(LINENO(CHILD(ch, i)), type_comment, arena); - if (!ti) - goto out; - asdl_seq_SET(type_ignores, i, ti); - } - - res = Module(stmts, type_ignores, arena); - break; - case eval_input: { - expr_ty testlist_ast; - - /* XXX Why not comp_for here? 
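num_stmts() above is why a line such as "x = 1; y = 2" yields two statements in the AST even though the CST sees a single simple_stmt. A quick check with the stdlib ast module:

    import ast

    tree = ast.parse("x = 1; y = 2\n")
    print(len(tree.body))               # 2 -- one Assign per small_stmt
    print(type(tree.body[0]).__name__)  # Assign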
*/ - testlist_ast = ast_for_testlist(&c, CHILD(n, 0)); - if (!testlist_ast) - goto out; - res = Expression(testlist_ast, arena); - break; - } - case single_input: - if (TYPE(CHILD(n, 0)) == NEWLINE) { - stmts = _Py_asdl_seq_new(1, arena); - if (!stmts) - goto out; - asdl_seq_SET(stmts, 0, Pass(n->n_lineno, n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - arena)); - if (!asdl_seq_GET(stmts, 0)) - goto out; - res = Interactive(stmts, arena); - } - else { - n = CHILD(n, 0); - num = num_stmts(n); - stmts = _Py_asdl_seq_new(num, arena); - if (!stmts) - goto out; - if (num == 1) { - s = ast_for_stmt(&c, n); - if (!s) - goto out; - asdl_seq_SET(stmts, 0, s); - } - else { - /* Only a simple_stmt can contain multiple statements. */ - REQ(n, simple_stmt); - for (i = 0; i < NCH(n); i += 2) { - if (TYPE(CHILD(n, i)) == NEWLINE) - break; - s = ast_for_stmt(&c, CHILD(n, i)); - if (!s) - goto out; - asdl_seq_SET(stmts, i / 2, s); - } - } - - res = Interactive(stmts, arena); - } - break; - case func_type_input: - n = CHILD(n, 0); - REQ(n, func_type); - - if (TYPE(CHILD(n, 1)) == typelist) { - ch = CHILD(n, 1); - /* this is overly permissive -- we don't pay any attention to - * stars on the args -- just parse them into an ordered list */ - num = 0; - for (i = 0; i < NCH(ch); i++) { - if (TYPE(CHILD(ch, i)) == test) { - num++; - } - } - - argtypes = _Py_asdl_seq_new(num, arena); - if (!argtypes) - goto out; - - j = 0; - for (i = 0; i < NCH(ch); i++) { - if (TYPE(CHILD(ch, i)) == test) { - arg = ast_for_expr(&c, CHILD(ch, i)); - if (!arg) - goto out; - asdl_seq_SET(argtypes, j++, arg); - } - } - } - else { - argtypes = _Py_asdl_seq_new(0, arena); - if (!argtypes) - goto out; - } - - ret = ast_for_expr(&c, CHILD(n, NCH(n) - 1)); - if (!ret) - goto out; - res = FunctionType(argtypes, ret, arena); - break; - default: - PyErr_Format(PyExc_SystemError, - "invalid node %d for PyAST_FromNode", TYPE(n)); - goto out; - } - out: - if (c.c_normalize) { - Py_DECREF(c.c_normalize); - } - return res; -} - -mod_ty -PyAST_FromNode(const node *n, PyCompilerFlags *flags, const char *filename_str, - PyArena *arena) -{ - mod_ty mod; - PyObject *filename; - filename = PyUnicode_DecodeFSDefault(filename_str); - if (filename == NULL) - return NULL; - mod = PyAST_FromNodeObject(n, flags, filename, arena); - Py_DECREF(filename); - return mod; - -} - -/* Return the AST repr. of the operator represented as syntax (|, ^, etc.) -*/ - -static operator_ty -get_operator(struct compiling *c, const node *n) -{ - switch (TYPE(n)) { - case VBAR: - return BitOr; - case CIRCUMFLEX: - return BitXor; - case AMPER: - return BitAnd; - case LEFTSHIFT: - return LShift; - case RIGHTSHIFT: - return RShift; - case PLUS: - return Add; - case MINUS: - return Sub; - case STAR: - return Mult; - case AT: - if (c->c_feature_version < 5) { - ast_error(c, n, - "The '@' operator is only supported in Python 3.5 and greater"); - return (operator_ty)0; - } - return MatMult; - case SLASH: - return Div; - case DOUBLESLASH: - return FloorDiv; - case PERCENT: - return Mod; - default: - return (operator_ty)0; - } -} - -static const char * const FORBIDDEN[] = { - "None", - "True", - "False", - "__debug__", - NULL, -}; - -static int -forbidden_name(struct compiling *c, identifier name, const node *n, - int full_checks) -{ - assert(PyUnicode_Check(name)); - const char * const *p = FORBIDDEN; - if (!full_checks) { - /* In most cases, the parser will protect True, False, and None - from being assign to. 
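PyAST_FromNodeObject above maps each start symbol (file_input, eval_input, single_input, func_type_input) to its AST root type. Roughly the same mapping is reachable from Python via ast.parse (mode="func_type" needs Python 3.8 or later):

    import ast

    print(type(ast.parse("x", mode="exec")).__name__)                  # Module
    print(type(ast.parse("x", mode="eval")).__name__)                  # Expression
    print(type(ast.parse("x", mode="single")).__name__)                # Interactive
    print(type(ast.parse("(int) -> str", mode="func_type")).__name__)  # FunctionType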
*/ - p += 3; - } - for (; *p; p++) { - if (_PyUnicode_EqualToASCIIString(name, *p)) { - ast_error(c, n, "cannot assign to %U", name); - return 1; - } - } - return 0; -} - -static expr_ty -copy_location(expr_ty e, const node *n, const node *end) -{ - if (e) { - e->lineno = LINENO(n); - e->col_offset = n->n_col_offset; - e->end_lineno = end->n_end_lineno; - e->end_col_offset = end->n_end_col_offset; - } - return e; -} - -static const char * -get_expr_name(expr_ty e) -{ - switch (e->kind) { - case Attribute_kind: - return "attribute"; - case Subscript_kind: - return "subscript"; - case Starred_kind: - return "starred"; - case Name_kind: - return "name"; - case List_kind: - return "list"; - case Tuple_kind: - return "tuple"; - case Lambda_kind: - return "lambda"; - case Call_kind: - return "function call"; - case BoolOp_kind: - case BinOp_kind: - case UnaryOp_kind: - return "operator"; - case GeneratorExp_kind: - return "generator expression"; - case Yield_kind: - case YieldFrom_kind: - return "yield expression"; - case Await_kind: - return "await expression"; - case ListComp_kind: - return "list comprehension"; - case SetComp_kind: - return "set comprehension"; - case DictComp_kind: - return "dict comprehension"; - case Dict_kind: - return "dict display"; - case Set_kind: - return "set display"; - case JoinedStr_kind: - case FormattedValue_kind: - return "f-string expression"; - case Constant_kind: { - PyObject *value = e->v.Constant.value; - if (value == Py_None) { - return "None"; - } - if (value == Py_False) { - return "False"; - } - if (value == Py_True) { - return "True"; - } - if (value == Py_Ellipsis) { - return "Ellipsis"; - } - return "literal"; - } - case Compare_kind: - return "comparison"; - case IfExp_kind: - return "conditional expression"; - case NamedExpr_kind: - return "named expression"; - default: - PyErr_Format(PyExc_SystemError, - "unexpected expression in assignment %d (line %d)", - e->kind, e->lineno); - return NULL; - } -} - -/* Set the context ctx for expr_ty e, recursively traversing e. - - Only sets context for expr kinds that "can appear in assignment context" - (according to ../Parser/Python.asdl). For other expr kinds, it sets - an appropriate syntax error and returns false. -*/ - -static int -set_context(struct compiling *c, expr_ty e, expr_context_ty ctx, const node *n) -{ - asdl_seq *s = NULL; - - /* Expressions in an augmented assignment have a Store context. */ - - switch (e->kind) { - case Attribute_kind: - e->v.Attribute.ctx = ctx; - if (ctx == Store && forbidden_name(c, e->v.Attribute.attr, n, 1)) - return 0; - break; - case Subscript_kind: - e->v.Subscript.ctx = ctx; - break; - case Starred_kind: - e->v.Starred.ctx = ctx; - if (!set_context(c, e->v.Starred.value, ctx, n)) - return 0; - break; - case Name_kind: - if (ctx == Store) { - if (forbidden_name(c, e->v.Name.id, n, 0)) - return 0; /* forbidden_name() calls ast_error() */ - } - e->v.Name.ctx = ctx; - break; - case List_kind: - e->v.List.ctx = ctx; - s = e->v.List.elts; - break; - case Tuple_kind: - e->v.Tuple.ctx = ctx; - s = e->v.Tuple.elts; - break; - default: { - const char *expr_name = get_expr_name(e); - if (expr_name != NULL) { - ast_error(c, n, "cannot %s %s", - ctx == Store ? "assign to" : "delete", - expr_name); - } - return 0; - } - } - - /* If the LHS is a list or tuple, we need to set the assignment - context for all the contained elements. 
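forbidden_name and set_context above are what give assignment targets a Store context and reject assignment to reserved names. Seen from Python (the exact SyntaxError wording can differ between versions):

    import ast

    assign = ast.parse("a, b = b, a").body[0]
    print(type(assign.targets[0].ctx).__name__)  # Store -- left-hand tuple
    print(type(assign.value.ctx).__name__)       # Load  -- right-hand tuple

    try:
        ast.parse("True = 1")
    except SyntaxError as err:
        print(err.msg)                           # e.g. "cannot assign to True"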
- */ - if (s) { - Py_ssize_t i; - - for (i = 0; i < asdl_seq_LEN(s); i++) { - if (!set_context(c, (expr_ty)asdl_seq_GET(s, i), ctx, n)) - return 0; - } - } - return 1; -} - -static operator_ty -ast_for_augassign(struct compiling *c, const node *n) -{ - REQ(n, augassign); - n = CHILD(n, 0); - switch (STR(n)[0]) { - case '+': - return Add; - case '-': - return Sub; - case '/': - if (STR(n)[1] == '/') - return FloorDiv; - else - return Div; - case '%': - return Mod; - case '<': - return LShift; - case '>': - return RShift; - case '&': - return BitAnd; - case '^': - return BitXor; - case '|': - return BitOr; - case '*': - if (STR(n)[1] == '*') - return Pow; - else - return Mult; - case '@': - if (c->c_feature_version < 5) { - ast_error(c, n, - "The '@' operator is only supported in Python 3.5 and greater"); - return (operator_ty)0; - } - return MatMult; - default: - PyErr_Format(PyExc_SystemError, "invalid augassign: %s", STR(n)); - return (operator_ty)0; - } -} - -static cmpop_ty -ast_for_comp_op(struct compiling *c, const node *n) -{ - /* comp_op: '<'|'>'|'=='|'>='|'<='|'!='|'in'|'not' 'in'|'is' - |'is' 'not' - */ - REQ(n, comp_op); - if (NCH(n) == 1) { - n = CHILD(n, 0); - switch (TYPE(n)) { - case LESS: - return Lt; - case GREATER: - return Gt; - case EQEQUAL: /* == */ - return Eq; - case LESSEQUAL: - return LtE; - case GREATEREQUAL: - return GtE; - case NOTEQUAL: - return NotEq; - case NAME: - if (strcmp(STR(n), "in") == 0) - return In; - if (strcmp(STR(n), "is") == 0) - return Is; - /* fall through */ - default: - PyErr_Format(PyExc_SystemError, "invalid comp_op: %s", - STR(n)); - return (cmpop_ty)0; - } - } - else if (NCH(n) == 2) { - /* handle "not in" and "is not" */ - switch (TYPE(CHILD(n, 0))) { - case NAME: - if (strcmp(STR(CHILD(n, 1)), "in") == 0) - return NotIn; - if (strcmp(STR(CHILD(n, 0)), "is") == 0) - return IsNot; - /* fall through */ - default: - PyErr_Format(PyExc_SystemError, "invalid comp_op: %s %s", - STR(CHILD(n, 0)), STR(CHILD(n, 1))); - return (cmpop_ty)0; - } - } - PyErr_Format(PyExc_SystemError, "invalid comp_op: has %d children", - NCH(n)); - return (cmpop_ty)0; -} - -static asdl_seq * -seq_for_testlist(struct compiling *c, const node *n) -{ - /* testlist: test (',' test)* [','] - testlist_star_expr: test|star_expr (',' test|star_expr)* [','] - */ - asdl_seq *seq; - expr_ty expression; - int i; - assert(TYPE(n) == testlist || TYPE(n) == testlist_star_expr || TYPE(n) == testlist_comp); - - seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena); - if (!seq) - return NULL; - - for (i = 0; i < NCH(n); i += 2) { - const node *ch = CHILD(n, i); - assert(TYPE(ch) == test || TYPE(ch) == test_nocond || TYPE(ch) == star_expr || TYPE(ch) == namedexpr_test); - - expression = ast_for_expr(c, ch); - if (!expression) - return NULL; - - assert(i / 2 < seq->size); - asdl_seq_SET(seq, i / 2, expression); - } - return seq; -} - -static arg_ty -ast_for_arg(struct compiling *c, const node *n) -{ - identifier name; - expr_ty annotation = NULL; - node *ch; - arg_ty ret; - - assert(TYPE(n) == tfpdef || TYPE(n) == vfpdef); - ch = CHILD(n, 0); - name = NEW_IDENTIFIER(ch); - if (!name) - return NULL; - if (forbidden_name(c, name, ch, 0)) - return NULL; - - if (NCH(n) == 3 && TYPE(CHILD(n, 1)) == COLON) { - annotation = ast_for_expr(c, CHILD(n, 2)); - if (!annotation) - return NULL; - } - - ret = arg(name, annotation, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - if (!ret) - return NULL; - return ret; -} - -/* returns -1 if failed to handle keyword 
only arguments - returns new position to keep processing if successful - (',' tfpdef ['=' test])* - ^^^ - start pointing here - */ -static int -handle_keywordonly_args(struct compiling *c, const node *n, int start, - asdl_seq *kwonlyargs, asdl_seq *kwdefaults) -{ - PyObject *argname; - node *ch; - expr_ty expression, annotation; - arg_ty arg = NULL; - int i = start; - int j = 0; /* index for kwdefaults and kwonlyargs */ - - if (kwonlyargs == NULL) { - ast_error(c, CHILD(n, start), "named arguments must follow bare *"); - return -1; - } - assert(kwdefaults != NULL); - while (i < NCH(n)) { - ch = CHILD(n, i); - switch (TYPE(ch)) { - case vfpdef: - case tfpdef: - if (i + 1 < NCH(n) && TYPE(CHILD(n, i + 1)) == EQUAL) { - expression = ast_for_expr(c, CHILD(n, i + 2)); - if (!expression) - goto error; - asdl_seq_SET(kwdefaults, j, expression); - i += 2; /* '=' and test */ - } - else { /* setting NULL if no default value exists */ - asdl_seq_SET(kwdefaults, j, NULL); - } - if (NCH(ch) == 3) { - /* ch is NAME ':' test */ - annotation = ast_for_expr(c, CHILD(ch, 2)); - if (!annotation) - goto error; - } - else { - annotation = NULL; - } - ch = CHILD(ch, 0); - argname = NEW_IDENTIFIER(ch); - if (!argname) - goto error; - if (forbidden_name(c, argname, ch, 0)) - goto error; - arg = arg(argname, annotation, NULL, LINENO(ch), ch->n_col_offset, - ch->n_end_lineno, ch->n_end_col_offset, - c->c_arena); - if (!arg) - goto error; - asdl_seq_SET(kwonlyargs, j++, arg); - i += 1; /* the name */ - if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) - i += 1; /* the comma, if present */ - break; - case TYPE_COMMENT: - /* arg will be equal to the last argument processed */ - arg->type_comment = NEW_TYPE_COMMENT(ch); - if (!arg->type_comment) - goto error; - i += 1; - break; - case DOUBLESTAR: - return i; - default: - ast_error(c, ch, "unexpected node"); - goto error; - } - } - return i; - error: - return -1; -} - -/* Create AST for argument list. */ - -static arguments_ty -ast_for_arguments(struct compiling *c, const node *n) -{ - /* This function handles both typedargslist (function definition) - and varargslist (lambda definition). 
- - parameters: '(' [typedargslist] ')' - - The following definition for typedarglist is equivalent to this set of rules: - - arguments = argument (',' [TYPE_COMMENT] argument)* - argument = tfpdef ['=' test] - kwargs = '**' tfpdef [','] [TYPE_COMMENT] - args = '*' [tfpdef] - kwonly_kwargs = (',' [TYPE_COMMENT] argument)* (TYPE_COMMENT | [',' - [TYPE_COMMENT] [kwargs]]) - args_kwonly_kwargs = args kwonly_kwargs | kwargs - poskeyword_args_kwonly_kwargs = arguments ( TYPE_COMMENT | [',' - [TYPE_COMMENT] [args_kwonly_kwargs]]) - typedargslist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs - typedarglist = (arguments ',' [TYPE_COMMENT] '/' [',' [[TYPE_COMMENT] - typedargslist_no_posonly]])|(typedargslist_no_posonly)" - - typedargslist: ( (tfpdef ['=' test] (',' [TYPE_COMMENT] tfpdef ['=' test])* - ',' [TYPE_COMMENT] '/' [',' [ [TYPE_COMMENT] tfpdef ['=' test] ( ',' - [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] [ '*' - [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' - [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]]) | '**' tfpdef [','] - [TYPE_COMMENT]]]) | '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* - (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]]) | - '**' tfpdef [','] [TYPE_COMMENT]]] ) | (tfpdef ['=' test] (',' - [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' [TYPE_COMMENT] [ '*' - [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* (TYPE_COMMENT | [',' - [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]]) | '**' tfpdef [','] - [TYPE_COMMENT]]]) | '*' [tfpdef] (',' [TYPE_COMMENT] tfpdef ['=' test])* - (TYPE_COMMENT | [',' [TYPE_COMMENT] ['**' tfpdef [','] [TYPE_COMMENT]]]) | - '**' tfpdef [','] [TYPE_COMMENT])) - - tfpdef: NAME [':' test] - - The following definition for varargslist is equivalent to this set of rules: - - arguments = argument (',' argument )* - argument = vfpdef ['=' test] - kwargs = '**' vfpdef [','] - args = '*' [vfpdef] - kwonly_kwargs = (',' argument )* [',' [kwargs]] - args_kwonly_kwargs = args kwonly_kwargs | kwargs - poskeyword_args_kwonly_kwargs = arguments [',' [args_kwonly_kwargs]] - vararglist_no_posonly = poskeyword_args_kwonly_kwargs | args_kwonly_kwargs - varargslist = arguments ',' '/' [','[(vararglist_no_posonly)]] | - (vararglist_no_posonly) - - varargslist: vfpdef ['=' test ](',' vfpdef ['=' test])* ',' '/' [',' [ (vfpdef ['=' - test] (',' vfpdef ['=' test])* [',' [ '*' [vfpdef] (',' vfpdef ['=' test])* [',' - ['**' vfpdef [',']]] | '**' vfpdef [',']]] | '*' [vfpdef] (',' vfpdef ['=' test])* - [',' ['**' vfpdef [',']]] | '**' vfpdef [',']) ]] | (vfpdef ['=' test] (',' vfpdef - ['=' test])* [',' [ '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef [',']]] - | '**' vfpdef [',']]] | '*' [vfpdef] (',' vfpdef ['=' test])* [',' ['**' vfpdef - [',']]] | '**' vfpdef [',']) - - vfpdef: NAME - - */ - int i, j, k, l, nposonlyargs=0, nposargs = 0, nkwonlyargs = 0; - int nposdefaults = 0, found_default = 0; - asdl_seq *posonlyargs, *posargs, *posdefaults, *kwonlyargs, *kwdefaults; - arg_ty vararg = NULL, kwarg = NULL; - arg_ty arg = NULL; - node *ch; - - if (TYPE(n) == parameters) { - if (NCH(n) == 2) /* () as argument list */ - return arguments(NULL, NULL, NULL, NULL, NULL, NULL, NULL, c->c_arena); - n = CHILD(n, 1); - } - assert(TYPE(n) == typedargslist || TYPE(n) == varargslist); - - /* First count the number of positional args & defaults. The - variable i is the loop index for this for loop and the next. 
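ast_for_arguments (whose grammar is quoted above) splits a parameter list into the positional-only, positional, vararg, keyword-only and kwarg slots of the arguments node. The resulting shape, seen from Python (positional-only parameters need 3.8+):

    import ast

    fn = ast.parse("def f(a, b=0, /, c=1, *args, d, e=2, **kw): pass").body[0]
    a = fn.args
    print([p.arg for p in a.posonlyargs])  # ['a', 'b']
    print([p.arg for p in a.args])         # ['c']
    print(a.vararg.arg, a.kwarg.arg)       # args kw
    print([p.arg for p in a.kwonlyargs])   # ['d', 'e']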
- The next loop picks up where the first leaves off. - */ - for (i = 0; i < NCH(n); i++) { - ch = CHILD(n, i); - if (TYPE(ch) == STAR) { - /* skip star */ - i++; - if (i < NCH(n) && /* skip argument following star */ - (TYPE(CHILD(n, i)) == tfpdef || - TYPE(CHILD(n, i)) == vfpdef)) { - i++; - } - break; - } - if (TYPE(ch) == DOUBLESTAR) break; - if (TYPE(ch) == vfpdef || TYPE(ch) == tfpdef) nposargs++; - if (TYPE(ch) == EQUAL) nposdefaults++; - if (TYPE(ch) == SLASH ) { - nposonlyargs = nposargs; - nposargs = 0; - } - } - /* count the number of keyword only args & - defaults for keyword only args */ - for ( ; i < NCH(n); ++i) { - ch = CHILD(n, i); - if (TYPE(ch) == DOUBLESTAR) break; - if (TYPE(ch) == tfpdef || TYPE(ch) == vfpdef) nkwonlyargs++; - } - posonlyargs = (nposonlyargs ? _Py_asdl_seq_new(nposonlyargs, c->c_arena) : NULL); - if (!posonlyargs && nposonlyargs) { - return NULL; - } - posargs = (nposargs ? _Py_asdl_seq_new(nposargs, c->c_arena) : NULL); - if (!posargs && nposargs) - return NULL; - kwonlyargs = (nkwonlyargs ? - _Py_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL); - if (!kwonlyargs && nkwonlyargs) - return NULL; - posdefaults = (nposdefaults ? - _Py_asdl_seq_new(nposdefaults, c->c_arena) : NULL); - if (!posdefaults && nposdefaults) - return NULL; - /* The length of kwonlyargs and kwdefaults are same - since we set NULL as default for keyword only argument w/o default - - we have sequence data structure, but no dictionary */ - kwdefaults = (nkwonlyargs ? - _Py_asdl_seq_new(nkwonlyargs, c->c_arena) : NULL); - if (!kwdefaults && nkwonlyargs) - return NULL; - - /* tfpdef: NAME [':' test] - vfpdef: NAME - */ - i = 0; - j = 0; /* index for defaults */ - k = 0; /* index for args */ - l = 0; /* index for posonlyargs */ - while (i < NCH(n)) { - ch = CHILD(n, i); - switch (TYPE(ch)) { - case tfpdef: - case vfpdef: - /* XXX Need to worry about checking if TYPE(CHILD(n, i+1)) is - anything other than EQUAL or a comma? */ - /* XXX Should NCH(n) check be made a separate check? */ - if (i + 1 < NCH(n) && TYPE(CHILD(n, i + 1)) == EQUAL) { - expr_ty expression = ast_for_expr(c, CHILD(n, i + 2)); - if (!expression) - return NULL; - assert(posdefaults != NULL); - asdl_seq_SET(posdefaults, j++, expression); - i += 2; - found_default = 1; - } - else if (found_default) { - ast_error(c, n, - "non-default argument follows default argument"); - return NULL; - } - arg = ast_for_arg(c, ch); - if (!arg) - return NULL; - if (l < nposonlyargs) { - asdl_seq_SET(posonlyargs, l++, arg); - } else { - asdl_seq_SET(posargs, k++, arg); - } - i += 1; /* the name */ - if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) - i += 1; /* the comma, if present */ - break; - case SLASH: - /* Advance the slash and the comma. If there are more names - * after the slash there will be a comma so we are advancing - * the correct number of nodes. If the slash is the last item, - * we will be advancing an extra token but then * i > NCH(n) - * and the enclosing while will finish correctly. 
*/ - i += 2; - break; - case STAR: - if (i+1 >= NCH(n) || - (i+2 == NCH(n) && (TYPE(CHILD(n, i+1)) == COMMA - || TYPE(CHILD(n, i+1)) == TYPE_COMMENT))) { - ast_error(c, CHILD(n, i), - "named arguments must follow bare *"); - return NULL; - } - ch = CHILD(n, i+1); /* tfpdef or COMMA */ - if (TYPE(ch) == COMMA) { - int res = 0; - i += 2; /* now follows keyword only arguments */ - - if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) { - ast_error(c, CHILD(n, i), - "bare * has associated type comment"); - return NULL; - } - - res = handle_keywordonly_args(c, n, i, - kwonlyargs, kwdefaults); - if (res == -1) return NULL; - i = res; /* res has new position to process */ - } - else { - vararg = ast_for_arg(c, ch); - if (!vararg) - return NULL; - - i += 2; /* the star and the name */ - if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) - i += 1; /* the comma, if present */ - - if (i < NCH(n) && TYPE(CHILD(n, i)) == TYPE_COMMENT) { - vararg->type_comment = NEW_TYPE_COMMENT(CHILD(n, i)); - if (!vararg->type_comment) - return NULL; - i += 1; - } - - if (i < NCH(n) && (TYPE(CHILD(n, i)) == tfpdef - || TYPE(CHILD(n, i)) == vfpdef)) { - int res = 0; - res = handle_keywordonly_args(c, n, i, - kwonlyargs, kwdefaults); - if (res == -1) return NULL; - i = res; /* res has new position to process */ - } - } - break; - case DOUBLESTAR: - ch = CHILD(n, i+1); /* tfpdef */ - assert(TYPE(ch) == tfpdef || TYPE(ch) == vfpdef); - kwarg = ast_for_arg(c, ch); - if (!kwarg) - return NULL; - i += 2; /* the double star and the name */ - if (i < NCH(n) && TYPE(CHILD(n, i)) == COMMA) - i += 1; /* the comma, if present */ - break; - case TYPE_COMMENT: - assert(i); - - if (kwarg) - arg = kwarg; - - /* arg will be equal to the last argument processed */ - arg->type_comment = NEW_TYPE_COMMENT(ch); - if (!arg->type_comment) - return NULL; - i += 1; - break; - default: - PyErr_Format(PyExc_SystemError, - "unexpected node in varargslist: %d @ %d", - TYPE(ch), i); - return NULL; - } - } - return arguments(posonlyargs, posargs, vararg, kwonlyargs, kwdefaults, kwarg, posdefaults, c->c_arena); -} - -static expr_ty -ast_for_decorator(struct compiling *c, const node *n) -{ - /* decorator: '@' namedexpr_test NEWLINE */ - - REQ(n, decorator); - REQ(CHILD(n, 0), AT); - REQ(CHILD(n, 2), NEWLINE); - - return ast_for_expr(c, CHILD(n, 1)); -} - -static asdl_seq* -ast_for_decorators(struct compiling *c, const node *n) -{ - asdl_seq* decorator_seq; - expr_ty d; - int i; - - REQ(n, decorators); - decorator_seq = _Py_asdl_seq_new(NCH(n), c->c_arena); - if (!decorator_seq) - return NULL; - - for (i = 0; i < NCH(n); i++) { - d = ast_for_decorator(c, CHILD(n, i)); - if (!d) - return NULL; - asdl_seq_SET(decorator_seq, i, d); - } - return decorator_seq; -} - -static stmt_ty -ast_for_funcdef_impl(struct compiling *c, const node *n0, - asdl_seq *decorator_seq, bool is_async) -{ - /* funcdef: 'def' NAME parameters ['->' test] ':' [TYPE_COMMENT] suite */ - const node * const n = is_async ? 
CHILD(n0, 1) : n0; - identifier name; - arguments_ty args; - asdl_seq *body; - expr_ty returns = NULL; - int name_i = 1; - int end_lineno, end_col_offset; - node *tc; - string type_comment = NULL; - - if (is_async && c->c_feature_version < 5) { - ast_error(c, n, - "Async functions are only supported in Python 3.5 and greater"); - return NULL; - } - - REQ(n, funcdef); - - name = NEW_IDENTIFIER(CHILD(n, name_i)); - if (!name) - return NULL; - if (forbidden_name(c, name, CHILD(n, name_i), 0)) - return NULL; - args = ast_for_arguments(c, CHILD(n, name_i + 1)); - if (!args) - return NULL; - if (TYPE(CHILD(n, name_i+2)) == RARROW) { - returns = ast_for_expr(c, CHILD(n, name_i + 3)); - if (!returns) - return NULL; - name_i += 2; - } - if (TYPE(CHILD(n, name_i + 3)) == TYPE_COMMENT) { - type_comment = NEW_TYPE_COMMENT(CHILD(n, name_i + 3)); - if (!type_comment) - return NULL; - name_i += 1; - } - body = ast_for_suite(c, CHILD(n, name_i + 3)); - if (!body) - return NULL; - get_last_end_pos(body, &end_lineno, &end_col_offset); - - if (NCH(CHILD(n, name_i + 3)) > 1) { - /* Check if the suite has a type comment in it. */ - tc = CHILD(CHILD(n, name_i + 3), 1); - - if (TYPE(tc) == TYPE_COMMENT) { - if (type_comment != NULL) { - ast_error(c, n, "Cannot have two type comments on def"); - return NULL; - } - type_comment = NEW_TYPE_COMMENT(tc); - if (!type_comment) - return NULL; - } - } - - if (is_async) - return AsyncFunctionDef(name, args, body, decorator_seq, returns, type_comment, - LINENO(n0), n0->n_col_offset, end_lineno, end_col_offset, c->c_arena); - else - return FunctionDef(name, args, body, decorator_seq, returns, type_comment, - LINENO(n), n->n_col_offset, end_lineno, end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_async_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq) -{ - /* async_funcdef: ASYNC funcdef */ - REQ(n, async_funcdef); - REQ(CHILD(n, 0), ASYNC); - REQ(CHILD(n, 1), funcdef); - - return ast_for_funcdef_impl(c, n, decorator_seq, - true /* is_async */); -} - -static stmt_ty -ast_for_funcdef(struct compiling *c, const node *n, asdl_seq *decorator_seq) -{ - /* funcdef: 'def' NAME parameters ['->' test] ':' suite */ - return ast_for_funcdef_impl(c, n, decorator_seq, - false /* is_async */); -} - - -static stmt_ty -ast_for_async_stmt(struct compiling *c, const node *n) -{ - /* async_stmt: ASYNC (funcdef | with_stmt | for_stmt) */ - REQ(n, async_stmt); - REQ(CHILD(n, 0), ASYNC); - - switch (TYPE(CHILD(n, 1))) { - case funcdef: - return ast_for_funcdef_impl(c, n, NULL, - true /* is_async */); - case with_stmt: - return ast_for_with_stmt(c, n, - true /* is_async */); - - case for_stmt: - return ast_for_for_stmt(c, n, - true /* is_async */); - - default: - PyErr_Format(PyExc_SystemError, - "invalid async stament: %s", - STR(CHILD(n, 1))); - return NULL; - } -} - -static stmt_ty -ast_for_decorated(struct compiling *c, const node *n) -{ - /* decorated: decorators (classdef | funcdef | async_funcdef) */ - stmt_ty thing = NULL; - asdl_seq *decorator_seq = NULL; - - REQ(n, decorated); - - decorator_seq = ast_for_decorators(c, CHILD(n, 0)); - if (!decorator_seq) - return NULL; - - assert(TYPE(CHILD(n, 1)) == funcdef || - TYPE(CHILD(n, 1)) == async_funcdef || - TYPE(CHILD(n, 1)) == classdef); - - if (TYPE(CHILD(n, 1)) == funcdef) { - thing = ast_for_funcdef(c, CHILD(n, 1), decorator_seq); - } else if (TYPE(CHILD(n, 1)) == classdef) { - thing = ast_for_classdef(c, CHILD(n, 1), decorator_seq); - } else if (TYPE(CHILD(n, 1)) == async_funcdef) { - thing = 
ast_for_async_funcdef(c, CHILD(n, 1), decorator_seq); - } - return thing; -} - -static expr_ty -ast_for_namedexpr(struct compiling *c, const node *n) -{ - /* namedexpr_test: test [':=' test] - argument: ( test [comp_for] | - test ':=' test | - test '=' test | - '**' test | - '*' test ) - */ - expr_ty target, value; - - target = ast_for_expr(c, CHILD(n, 0)); - if (!target) - return NULL; - - value = ast_for_expr(c, CHILD(n, 2)); - if (!value) - return NULL; - - if (target->kind != Name_kind) { - const char *expr_name = get_expr_name(target); - if (expr_name != NULL) { - ast_error(c, n, "cannot use assignment expressions with %s", expr_name); - } - return NULL; - } - - if (!set_context(c, target, Store, n)) - return NULL; - - return NamedExpr(target, value, LINENO(n), n->n_col_offset, n->n_end_lineno, - n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_lambdef(struct compiling *c, const node *n) -{ - /* lambdef: 'lambda' [varargslist] ':' test - lambdef_nocond: 'lambda' [varargslist] ':' test_nocond */ - arguments_ty args; - expr_ty expression; - - if (NCH(n) == 3) { - args = arguments(NULL, NULL, NULL, NULL, NULL, NULL, NULL, c->c_arena); - if (!args) - return NULL; - expression = ast_for_expr(c, CHILD(n, 2)); - if (!expression) - return NULL; - } - else { - args = ast_for_arguments(c, CHILD(n, 1)); - if (!args) - return NULL; - expression = ast_for_expr(c, CHILD(n, 3)); - if (!expression) - return NULL; - } - - return Lambda(args, expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_ifexpr(struct compiling *c, const node *n) -{ - /* test: or_test 'if' or_test 'else' test */ - expr_ty expression, body, orelse; - - assert(NCH(n) == 5); - body = ast_for_expr(c, CHILD(n, 0)); - if (!body) - return NULL; - expression = ast_for_expr(c, CHILD(n, 2)); - if (!expression) - return NULL; - orelse = ast_for_expr(c, CHILD(n, 4)); - if (!orelse) - return NULL; - return IfExp(expression, body, orelse, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); -} - -/* - Count the number of 'for' loops in a comprehension. - - Helper for ast_for_comprehension(). -*/ - -static int -count_comp_fors(struct compiling *c, const node *n) -{ - int n_fors = 0; - - count_comp_for: - n_fors++; - REQ(n, comp_for); - if (NCH(n) == 2) { - REQ(CHILD(n, 0), ASYNC); - n = CHILD(n, 1); - } - else if (NCH(n) == 1) { - n = CHILD(n, 0); - } - else { - goto error; - } - if (NCH(n) == (5)) { - n = CHILD(n, 4); - } - else { - return n_fors; - } - count_comp_iter: - REQ(n, comp_iter); - n = CHILD(n, 0); - if (TYPE(n) == comp_for) - goto count_comp_for; - else if (TYPE(n) == comp_if) { - if (NCH(n) == 3) { - n = CHILD(n, 2); - goto count_comp_iter; - } - else - return n_fors; - } - - error: - /* Should never be reached */ - PyErr_SetString(PyExc_SystemError, - "logic error in count_comp_fors"); - return -1; -} - -/* Count the number of 'if' statements in a comprehension. - - Helper for ast_for_comprehension(). 
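count_comp_fors above (and count_comp_ifs just below) size the comprehension sequence: one comprehension node per 'for', with its trailing 'if' guards attached to it. For example:

    import ast

    comp = ast.parse("[x*y for x in xs if x for y in ys]", mode="eval").body
    print(len(comp.generators))          # 2 -- one per 'for'
    print(len(comp.generators[0].ifs))   # 1 -- the 'if x' guard
    print(comp.generators[1].is_async)   # 0 -- a plain, non-async 'for'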
-*/ - -static int -count_comp_ifs(struct compiling *c, const node *n) -{ - int n_ifs = 0; - - while (1) { - REQ(n, comp_iter); - if (TYPE(CHILD(n, 0)) == comp_for) - return n_ifs; - n = CHILD(n, 0); - REQ(n, comp_if); - n_ifs++; - if (NCH(n) == 2) - return n_ifs; - n = CHILD(n, 2); - } -} - -static asdl_seq * -ast_for_comprehension(struct compiling *c, const node *n) -{ - int i, n_fors; - asdl_seq *comps; - - n_fors = count_comp_fors(c, n); - if (n_fors == -1) - return NULL; - - comps = _Py_asdl_seq_new(n_fors, c->c_arena); - if (!comps) - return NULL; - - for (i = 0; i < n_fors; i++) { - comprehension_ty comp; - asdl_seq *t; - expr_ty expression, first; - node *for_ch; - node *sync_n; - int is_async = 0; - - REQ(n, comp_for); - - if (NCH(n) == 2) { - is_async = 1; - REQ(CHILD(n, 0), ASYNC); - sync_n = CHILD(n, 1); - } - else { - sync_n = CHILD(n, 0); - } - REQ(sync_n, sync_comp_for); - - /* Async comprehensions only allowed in Python 3.6 and greater */ - if (is_async && c->c_feature_version < 6) { - ast_error(c, n, - "Async comprehensions are only supported in Python 3.6 and greater"); - return NULL; - } - - for_ch = CHILD(sync_n, 1); - t = ast_for_exprlist(c, for_ch, Store); - if (!t) - return NULL; - expression = ast_for_expr(c, CHILD(sync_n, 3)); - if (!expression) - return NULL; - - /* Check the # of children rather than the length of t, since - (x for x, in ...) has 1 element in t, but still requires a Tuple. */ - first = (expr_ty)asdl_seq_GET(t, 0); - if (NCH(for_ch) == 1) - comp = comprehension(first, expression, NULL, - is_async, c->c_arena); - else - comp = comprehension(Tuple(t, Store, first->lineno, first->col_offset, - for_ch->n_end_lineno, for_ch->n_end_col_offset, - c->c_arena), - expression, NULL, is_async, c->c_arena); - if (!comp) - return NULL; - - if (NCH(sync_n) == 5) { - int j, n_ifs; - asdl_seq *ifs; - - n = CHILD(sync_n, 4); - n_ifs = count_comp_ifs(c, n); - if (n_ifs == -1) - return NULL; - - ifs = _Py_asdl_seq_new(n_ifs, c->c_arena); - if (!ifs) - return NULL; - - for (j = 0; j < n_ifs; j++) { - REQ(n, comp_iter); - n = CHILD(n, 0); - REQ(n, comp_if); - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - asdl_seq_SET(ifs, j, expression); - if (NCH(n) == 3) - n = CHILD(n, 2); - } - /* on exit, must guarantee that n is a comp_for */ - if (TYPE(n) == comp_iter) - n = CHILD(n, 0); - comp->ifs = ifs; - } - asdl_seq_SET(comps, i, comp); - } - return comps; -} - -static expr_ty -ast_for_itercomp(struct compiling *c, const node *n, int type) -{ - /* testlist_comp: (test|star_expr) - * ( comp_for | (',' (test|star_expr))* [','] ) */ - expr_ty elt; - asdl_seq *comps; - node *ch; - - assert(NCH(n) > 1); - - ch = CHILD(n, 0); - elt = ast_for_expr(c, ch); - if (!elt) - return NULL; - if (elt->kind == Starred_kind) { - ast_error(c, ch, "iterable unpacking cannot be used in comprehension"); - return NULL; - } - - comps = ast_for_comprehension(c, CHILD(n, 1)); - if (!comps) - return NULL; - - if (type == COMP_GENEXP) - return GeneratorExp(elt, comps, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else if (type == COMP_LISTCOMP) - return ListComp(elt, comps, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else if (type == COMP_SETCOMP) - return SetComp(elt, comps, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else - /* Should never happen */ - return NULL; -} - -/* Fills in the key, value pair corresponding to the dict element. 
In case - * of an unpacking, key is NULL. *i is advanced by the number of ast - * elements. Iff successful, nonzero is returned. - */ -static int -ast_for_dictelement(struct compiling *c, const node *n, int *i, - expr_ty *key, expr_ty *value) -{ - expr_ty expression; - if (TYPE(CHILD(n, *i)) == DOUBLESTAR) { - assert(NCH(n) - *i >= 2); - - expression = ast_for_expr(c, CHILD(n, *i + 1)); - if (!expression) - return 0; - *key = NULL; - *value = expression; - - *i += 2; - } - else { - assert(NCH(n) - *i >= 3); - - expression = ast_for_expr(c, CHILD(n, *i)); - if (!expression) - return 0; - *key = expression; - - REQ(CHILD(n, *i + 1), COLON); - - expression = ast_for_expr(c, CHILD(n, *i + 2)); - if (!expression) - return 0; - *value = expression; - - *i += 3; - } - return 1; -} - -static expr_ty -ast_for_dictcomp(struct compiling *c, const node *n) -{ - expr_ty key, value; - asdl_seq *comps; - int i = 0; - - if (!ast_for_dictelement(c, n, &i, &key, &value)) - return NULL; - assert(key); - assert(NCH(n) - i >= 1); - - comps = ast_for_comprehension(c, CHILD(n, i)); - if (!comps) - return NULL; - - return DictComp(key, value, comps, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_dictdisplay(struct compiling *c, const node *n) -{ - int i; - int j; - int size; - asdl_seq *keys, *values; - - size = (NCH(n) + 1) / 3; /* +1 in case no trailing comma */ - keys = _Py_asdl_seq_new(size, c->c_arena); - if (!keys) - return NULL; - - values = _Py_asdl_seq_new(size, c->c_arena); - if (!values) - return NULL; - - j = 0; - for (i = 0; i < NCH(n); i++) { - expr_ty key, value; - - if (!ast_for_dictelement(c, n, &i, &key, &value)) - return NULL; - asdl_seq_SET(keys, j, key); - asdl_seq_SET(values, j, value); - - j++; - } - keys->size = j; - values->size = j; - return Dict(keys, values, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_genexp(struct compiling *c, const node *n) -{ - assert(TYPE(n) == (testlist_comp) || TYPE(n) == (argument)); - return ast_for_itercomp(c, n, COMP_GENEXP); -} - -static expr_ty -ast_for_listcomp(struct compiling *c, const node *n) -{ - assert(TYPE(n) == (testlist_comp)); - return ast_for_itercomp(c, n, COMP_LISTCOMP); -} - -static expr_ty -ast_for_setcomp(struct compiling *c, const node *n) -{ - assert(TYPE(n) == (dictorsetmaker)); - return ast_for_itercomp(c, n, COMP_SETCOMP); -} - -static expr_ty -ast_for_setdisplay(struct compiling *c, const node *n) -{ - int i; - int size; - asdl_seq *elts; - - assert(TYPE(n) == (dictorsetmaker)); - size = (NCH(n) + 1) / 2; /* +1 in case no trailing comma */ - elts = _Py_asdl_seq_new(size, c->c_arena); - if (!elts) - return NULL; - for (i = 0; i < NCH(n); i += 2) { - expr_ty expression; - expression = ast_for_expr(c, CHILD(n, i)); - if (!expression) - return NULL; - asdl_seq_SET(elts, i / 2, expression); - } - return Set(elts, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_atom(struct compiling *c, const node *n) -{ - /* atom: '(' [yield_expr|testlist_comp] ')' | '[' [testlist_comp] ']' - | '{' [dictmaker|testlist_comp] '}' | NAME | NUMBER | STRING+ - | '...' 
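In ast_for_dictelement/ast_for_dictdisplay above, a '**' entry contributes no key, which surfaces as None in Dict.keys on the Python side:

    import ast

    d = ast.parse("{**base, 'k': v}", mode="eval").body
    print(type(d).__name__)                           # Dict
    print(d.keys[0])                                  # None -- the '**base' entry
    print(type(d.keys[1]).__name__, d.keys[1].value)  # Constant k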
| 'None' | 'True' | 'False' - */ - node *ch = CHILD(n, 0); - - switch (TYPE(ch)) { - case NAME: { - PyObject *name; - const char *s = STR(ch); - size_t len = strlen(s); - if (len >= 4 && len <= 5) { - if (!strcmp(s, "None")) - return Constant(Py_None, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - if (!strcmp(s, "True")) - return Constant(Py_True, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - if (!strcmp(s, "False")) - return Constant(Py_False, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - name = new_identifier(s, c); - if (!name) - return NULL; - /* All names start in Load context, but may later be changed. */ - return Name(name, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - case STRING: { - expr_ty str = parsestrplus(c, n); - if (!str) { - const char *errtype = NULL; - if (PyErr_ExceptionMatches(PyExc_UnicodeError)) - errtype = "unicode error"; - else if (PyErr_ExceptionMatches(PyExc_ValueError)) - errtype = "value error"; - if (errtype) { - PyObject *type, *value, *tback, *errstr; - PyErr_Fetch(&type, &value, &tback); - errstr = PyObject_Str(value); - if (errstr) { - ast_error(c, n, "(%s) %U", errtype, errstr); - Py_DECREF(errstr); - } - else { - PyErr_Clear(); - ast_error(c, n, "(%s) unknown error", errtype); - } - Py_DECREF(type); - Py_XDECREF(value); - Py_XDECREF(tback); - } - return NULL; - } - return str; - } - case NUMBER: { - PyObject *pynum; - /* Underscores in numeric literals are only allowed in Python 3.6 or greater */ - /* Check for underscores here rather than in parse_number so we can report a line number on error */ - if (c->c_feature_version < 6 && strchr(STR(ch), '_') != NULL) { - ast_error(c, ch, - "Underscores in numeric literals are only supported in Python 3.6 and greater"); - return NULL; - } - pynum = parsenumber(c, STR(ch)); - if (!pynum) - return NULL; - - if (PyArena_AddPyObject(c->c_arena, pynum) < 0) { - Py_DECREF(pynum); - return NULL; - } - return Constant(pynum, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - case ELLIPSIS: /* Ellipsis */ - return Constant(Py_Ellipsis, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - case LPAR: /* some parenthesized expressions */ - ch = CHILD(n, 1); - - if (TYPE(ch) == RPAR) - return Tuple(NULL, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - - if (TYPE(ch) == yield_expr) - return ast_for_expr(c, ch); - - /* testlist_comp: test ( comp_for | (',' test)* [','] ) */ - if (NCH(ch) == 1) { - return ast_for_testlist(c, ch); - } - - if (TYPE(CHILD(ch, 1)) == comp_for) { - return copy_location(ast_for_genexp(c, ch), n, n); - } - else { - return copy_location(ast_for_testlist(c, ch), n, n); - } - case LSQB: /* list (or list comprehension) */ - ch = CHILD(n, 1); - - if (TYPE(ch) == RSQB) - return List(NULL, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - - REQ(ch, testlist_comp); - if (NCH(ch) == 1 || TYPE(CHILD(ch, 1)) == COMMA) { - asdl_seq *elts = seq_for_testlist(c, ch); - if (!elts) - return NULL; - - return List(elts, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else { - return copy_location(ast_for_listcomp(c, ch), n, n); - } - case LBRACE: { - /* dictorsetmaker: ( ((test ':' test | '**' test) - * (comp_for | (',' (test ':' test | '**' 
test))* [','])) | - * ((test | '*' test) - * (comp_for | (',' (test | '*' test))* [','])) ) */ - expr_ty res; - ch = CHILD(n, 1); - if (TYPE(ch) == RBRACE) { - /* It's an empty dict. */ - return Dict(NULL, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else { - int is_dict = (TYPE(CHILD(ch, 0)) == DOUBLESTAR); - if (NCH(ch) == 1 || - (NCH(ch) > 1 && - TYPE(CHILD(ch, 1)) == COMMA)) { - /* It's a set display. */ - res = ast_for_setdisplay(c, ch); - } - else if (NCH(ch) > 1 && - TYPE(CHILD(ch, 1)) == comp_for) { - /* It's a set comprehension. */ - res = ast_for_setcomp(c, ch); - } - else if (NCH(ch) > 3 - is_dict && - TYPE(CHILD(ch, 3 - is_dict)) == comp_for) { - /* It's a dictionary comprehension. */ - if (is_dict) { - ast_error(c, n, - "dict unpacking cannot be used in dict comprehension"); - return NULL; - } - res = ast_for_dictcomp(c, ch); - } - else { - /* It's a dictionary display. */ - res = ast_for_dictdisplay(c, ch); - } - return copy_location(res, n, n); - } - } - default: - PyErr_Format(PyExc_SystemError, "unhandled atom %d", TYPE(ch)); - return NULL; - } -} - -static expr_ty -ast_for_slice(struct compiling *c, const node *n) -{ - node *ch; - expr_ty lower = NULL, upper = NULL, step = NULL; - - REQ(n, subscript); - - /* - subscript: test | [test] ':' [test] [sliceop] - sliceop: ':' [test] - */ - ch = CHILD(n, 0); - if (NCH(n) == 1 && TYPE(ch) == test) { - return ast_for_expr(c, ch); - } - - if (TYPE(ch) == test) { - lower = ast_for_expr(c, ch); - if (!lower) - return NULL; - } - - /* If there's an upper bound it's in the second or third position. */ - if (TYPE(ch) == COLON) { - if (NCH(n) > 1) { - node *n2 = CHILD(n, 1); - - if (TYPE(n2) == test) { - upper = ast_for_expr(c, n2); - if (!upper) - return NULL; - } - } - } else if (NCH(n) > 2) { - node *n2 = CHILD(n, 2); - - if (TYPE(n2) == test) { - upper = ast_for_expr(c, n2); - if (!upper) - return NULL; - } - } - - ch = CHILD(n, NCH(n) - 1); - if (TYPE(ch) == sliceop) { - if (NCH(ch) != 1) { - ch = CHILD(ch, 1); - if (TYPE(ch) == test) { - step = ast_for_expr(c, ch); - if (!step) - return NULL; - } - } - } - - return Slice(lower, upper, step, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_binop(struct compiling *c, const node *n) -{ - /* Must account for a sequence of expressions. - How should A op B op C by represented? - BinOp(BinOp(A, op, B), op, C). 
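The LBRACE branch above tells dict displays, set displays and their comprehension forms apart by peeking at the first children of dictorsetmaker. The distinction shows up directly in the node types:

    import ast

    print(type(ast.parse("{1: 2}", mode="eval").body).__name__)              # Dict
    print(type(ast.parse("{1, 2}", mode="eval").body).__name__)              # Set
    print(type(ast.parse("{k: v for k in ks}", mode="eval").body).__name__)  # DictComp
    print(type(ast.parse("{x for x in xs}", mode="eval").body).__name__)     # SetComp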
- */ - - int i, nops; - expr_ty expr1, expr2, result; - operator_ty newoperator; - - expr1 = ast_for_expr(c, CHILD(n, 0)); - if (!expr1) - return NULL; - - expr2 = ast_for_expr(c, CHILD(n, 2)); - if (!expr2) - return NULL; - - newoperator = get_operator(c, CHILD(n, 1)); - if (!newoperator) - return NULL; - - result = BinOp(expr1, newoperator, expr2, LINENO(n), n->n_col_offset, - CHILD(n, 2)->n_end_lineno, CHILD(n, 2)->n_end_col_offset, - c->c_arena); - if (!result) - return NULL; - - nops = (NCH(n) - 1) / 2; - for (i = 1; i < nops; i++) { - expr_ty tmp_result, tmp; - const node* next_oper = CHILD(n, i * 2 + 1); - - newoperator = get_operator(c, next_oper); - if (!newoperator) - return NULL; - - tmp = ast_for_expr(c, CHILD(n, i * 2 + 2)); - if (!tmp) - return NULL; - - tmp_result = BinOp(result, newoperator, tmp, - LINENO(n), n->n_col_offset, - CHILD(n, i * 2 + 2)->n_end_lineno, - CHILD(n, i * 2 + 2)->n_end_col_offset, - c->c_arena); - if (!tmp_result) - return NULL; - result = tmp_result; - } - return result; -} - -static expr_ty -ast_for_trailer(struct compiling *c, const node *n, expr_ty left_expr, const node *start) -{ - /* trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME - subscriptlist: subscript (',' subscript)* [','] - subscript: '.' '.' '.' | test | [test] ':' [test] [sliceop] - */ - const node *n_copy = n; - REQ(n, trailer); - if (TYPE(CHILD(n, 0)) == LPAR) { - if (NCH(n) == 2) - return Call(left_expr, NULL, NULL, LINENO(start), start->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else - return ast_for_call(c, CHILD(n, 1), left_expr, - start, CHILD(n, 0), CHILD(n, 2)); - } - else if (TYPE(CHILD(n, 0)) == DOT) { - PyObject *attr_id = NEW_IDENTIFIER(CHILD(n, 1)); - if (!attr_id) - return NULL; - return Attribute(left_expr, attr_id, Load, - LINENO(start), start->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else { - REQ(CHILD(n, 0), LSQB); - REQ(CHILD(n, 2), RSQB); - n = CHILD(n, 1); - if (NCH(n) == 1) { - expr_ty slc = ast_for_slice(c, CHILD(n, 0)); - if (!slc) - return NULL; - return Subscript(left_expr, slc, Load, LINENO(start), start->n_col_offset, - n_copy->n_end_lineno, n_copy->n_end_col_offset, - c->c_arena); - } - else { - int j; - expr_ty slc, e; - asdl_seq *elts; - elts = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena); - if (!elts) - return NULL; - for (j = 0; j < NCH(n); j += 2) { - slc = ast_for_slice(c, CHILD(n, j)); - if (!slc) - return NULL; - asdl_seq_SET(elts, j / 2, slc); - } - e = Tuple(elts, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - if (!e) - return NULL; - return Subscript(left_expr, e, - Load, LINENO(start), start->n_col_offset, - n_copy->n_end_lineno, n_copy->n_end_col_offset, - c->c_arena); - } - } -} - -static expr_ty -ast_for_factor(struct compiling *c, const node *n) -{ - expr_ty expression; - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - - switch (TYPE(CHILD(n, 0))) { - case PLUS: - return UnaryOp(UAdd, expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - case MINUS: - return UnaryOp(USub, expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - case TILDE: - return UnaryOp(Invert, expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - } - PyErr_Format(PyExc_SystemError, "unhandled factor: %d", - TYPE(CHILD(n, 0))); - return NULL; -} - -static expr_ty -ast_for_atom_expr(struct compiling 
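ast_for_binop above folds a chain such as A op B op C into left-nested BinOp nodes, exactly as its opening comment says:

    import ast

    e = ast.parse("a + b + c", mode="eval").body
    print(type(e).__name__)       # BinOp
    print(type(e.left).__name__)  # BinOp -- (a + b) is grouped first
    print(e.right.id)             # c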
*c, const node *n) -{ - int i, nch, start = 0; - expr_ty e; - - REQ(n, atom_expr); - nch = NCH(n); - - if (TYPE(CHILD(n, 0)) == AWAIT) { - if (c->c_feature_version < 5) { - ast_error(c, n, - "Await expressions are only supported in Python 3.5 and greater"); - return NULL; - } - start = 1; - assert(nch > 1); - } - - e = ast_for_atom(c, CHILD(n, start)); - if (!e) - return NULL; - if (nch == 1) - return e; - if (start && nch == 2) { - return Await(e, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - - for (i = start + 1; i < nch; i++) { - node *ch = CHILD(n, i); - if (TYPE(ch) != trailer) - break; - e = ast_for_trailer(c, ch, e, CHILD(n, start)); - if (!e) - return NULL; - } - - if (start) { - /* there was an 'await' */ - return Await(e, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else { - return e; - } -} - -static expr_ty -ast_for_power(struct compiling *c, const node *n) -{ - /* power: atom trailer* ('**' factor)* - */ - expr_ty e; - REQ(n, power); - e = ast_for_atom_expr(c, CHILD(n, 0)); - if (!e) - return NULL; - if (NCH(n) == 1) - return e; - if (TYPE(CHILD(n, NCH(n) - 1)) == factor) { - expr_ty f = ast_for_expr(c, CHILD(n, NCH(n) - 1)); - if (!f) - return NULL; - e = BinOp(e, Pow, f, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - return e; -} - -static expr_ty -ast_for_starred(struct compiling *c, const node *n) -{ - expr_ty tmp; - REQ(n, star_expr); - - tmp = ast_for_expr(c, CHILD(n, 1)); - if (!tmp) - return NULL; - - /* The Load context is changed later. */ - return Starred(tmp, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - - -/* Do not name a variable 'expr'! Will cause a compile error. 
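ast_for_atom_expr and ast_for_trailer above wrap the atom in one node per trailer, so the last trailer ends up outermost:

    import ast

    e = ast.parse("obj.attr(arg)[0]", mode="eval").body
    print(type(e).__name__)             # Subscript -- the trailing '[0]'
    print(type(e.value).__name__)       # Call      -- the '(arg)' trailer
    print(type(e.value.func).__name__)  # Attribute -- the '.attr' trailer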
-*/ - -static expr_ty -ast_for_expr(struct compiling *c, const node *n) -{ - /* handle the full range of simple expressions - namedexpr_test: test [':=' test] - test: or_test ['if' or_test 'else' test] | lambdef - test_nocond: or_test | lambdef_nocond - or_test: and_test ('or' and_test)* - and_test: not_test ('and' not_test)* - not_test: 'not' not_test | comparison - comparison: expr (comp_op expr)* - expr: xor_expr ('|' xor_expr)* - xor_expr: and_expr ('^' and_expr)* - and_expr: shift_expr ('&' shift_expr)* - shift_expr: arith_expr (('<<'|'>>') arith_expr)* - arith_expr: term (('+'|'-') term)* - term: factor (('*'|'@'|'/'|'%'|'//') factor)* - factor: ('+'|'-'|'~') factor | power - power: atom_expr ['**' factor] - atom_expr: [AWAIT] atom trailer* - yield_expr: 'yield' [yield_arg] - */ - - asdl_seq *seq; - int i; - - loop: - switch (TYPE(n)) { - case namedexpr_test: - if (NCH(n) == 3) - return ast_for_namedexpr(c, n); - /* Fallthrough */ - case test: - case test_nocond: - if (TYPE(CHILD(n, 0)) == lambdef || - TYPE(CHILD(n, 0)) == lambdef_nocond) - return ast_for_lambdef(c, CHILD(n, 0)); - else if (NCH(n) > 1) - return ast_for_ifexpr(c, n); - /* Fallthrough */ - case or_test: - case and_test: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena); - if (!seq) - return NULL; - for (i = 0; i < NCH(n); i += 2) { - expr_ty e = ast_for_expr(c, CHILD(n, i)); - if (!e) - return NULL; - asdl_seq_SET(seq, i / 2, e); - } - if (!strcmp(STR(CHILD(n, 1)), "and")) - return BoolOp(And, seq, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - assert(!strcmp(STR(CHILD(n, 1)), "or")); - return BoolOp(Or, seq, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - case not_test: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - else { - expr_ty expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - - return UnaryOp(Not, expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, - c->c_arena); - } - case comparison: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - else { - expr_ty expression; - asdl_int_seq *ops; - asdl_seq *cmps; - ops = _Py_asdl_int_seq_new(NCH(n) / 2, c->c_arena); - if (!ops) - return NULL; - cmps = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena); - if (!cmps) { - return NULL; - } - for (i = 1; i < NCH(n); i += 2) { - cmpop_ty newoperator; - - newoperator = ast_for_comp_op(c, CHILD(n, i)); - if (!newoperator) { - return NULL; - } - - expression = ast_for_expr(c, CHILD(n, i + 1)); - if (!expression) { - return NULL; - } - - asdl_seq_SET(ops, i / 2, newoperator); - asdl_seq_SET(cmps, i / 2, expression); - } - expression = ast_for_expr(c, CHILD(n, 0)); - if (!expression) { - return NULL; - } - - return Compare(expression, ops, cmps, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - - case star_expr: - return ast_for_starred(c, n); - /* The next five cases all handle BinOps. The main body of code - is the same in each case, but the switch turned inside out to - reuse the code for each type of operator. 
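Chained comparisons handled by the comparison case above end up as one Compare node with parallel ops and comparators sequences:

    import ast

    cmp = ast.parse("0 < x <= 10", mode="eval").body
    print(type(cmp).__name__)                     # Compare
    print([type(op).__name__ for op in cmp.ops])  # ['Lt', 'LtE']
    print(len(cmp.comparators))                   # 2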
- */ - case expr: - case xor_expr: - case and_expr: - case shift_expr: - case arith_expr: - case term: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - return ast_for_binop(c, n); - case yield_expr: { - node *an = NULL; - node *en = NULL; - int is_from = 0; - expr_ty exp = NULL; - if (NCH(n) > 1) - an = CHILD(n, 1); /* yield_arg */ - if (an) { - en = CHILD(an, NCH(an) - 1); - if (NCH(an) == 2) { - is_from = 1; - exp = ast_for_expr(c, en); - } - else - exp = ast_for_testlist(c, en); - if (!exp) - return NULL; - } - if (is_from) - return YieldFrom(exp, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - return Yield(exp, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - case factor: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - return ast_for_factor(c, n); - case power: - return ast_for_power(c, n); - default: - PyErr_Format(PyExc_SystemError, "unhandled expr: %d", TYPE(n)); - return NULL; - } - /* should never get here unless if error is set */ - return NULL; -} - -static expr_ty -ast_for_call(struct compiling *c, const node *n, expr_ty func, - const node *start, const node *maybegenbeg, const node *closepar) -{ - /* - arglist: argument (',' argument)* [','] - argument: ( test [comp_for] | '*' test | test '=' test | '**' test ) - */ - - int i, nargs, nkeywords; - int ndoublestars; - asdl_seq *args; - asdl_seq *keywords; - - REQ(n, arglist); - - nargs = 0; - nkeywords = 0; - for (i = 0; i < NCH(n); i++) { - node *ch = CHILD(n, i); - if (TYPE(ch) == argument) { - if (NCH(ch) == 1) - nargs++; - else if (TYPE(CHILD(ch, 1)) == comp_for) { - nargs++; - if (!maybegenbeg) { - ast_error(c, ch, "invalid syntax"); - return NULL; - } - if (NCH(n) > 1) { - ast_error(c, ch, "Generator expression must be parenthesized"); - return NULL; - } - } - else if (TYPE(CHILD(ch, 0)) == STAR) - nargs++; - else if (TYPE(CHILD(ch, 1)) == COLONEQUAL) { - nargs++; - } - else - /* TYPE(CHILD(ch, 0)) == DOUBLESTAR or keyword argument */ - nkeywords++; - } - } - - args = _Py_asdl_seq_new(nargs, c->c_arena); - if (!args) - return NULL; - keywords = _Py_asdl_seq_new(nkeywords, c->c_arena); - if (!keywords) - return NULL; - - nargs = 0; /* positional arguments + iterable argument unpackings */ - nkeywords = 0; /* keyword arguments + keyword argument unpackings */ - ndoublestars = 0; /* just keyword argument unpackings */ - for (i = 0; i < NCH(n); i++) { - node *ch = CHILD(n, i); - if (TYPE(ch) == argument) { - expr_ty e; - node *chch = CHILD(ch, 0); - if (NCH(ch) == 1) { - /* a positional argument */ - if (nkeywords) { - if (ndoublestars) { - ast_error(c, chch, - "positional argument follows " - "keyword argument unpacking"); - } - else { - ast_error(c, chch, - "positional argument follows " - "keyword argument"); - } - return NULL; - } - e = ast_for_expr(c, chch); - if (!e) - return NULL; - asdl_seq_SET(args, nargs++, e); - } - else if (TYPE(chch) == STAR) { - /* an iterable argument unpacking */ - expr_ty starred; - if (ndoublestars) { - ast_error(c, chch, - "iterable argument unpacking follows " - "keyword argument unpacking"); - return NULL; - } - e = ast_for_expr(c, CHILD(ch, 1)); - if (!e) - return NULL; - starred = Starred(e, Load, LINENO(chch), - chch->n_col_offset, - e->end_lineno, e->end_col_offset, - c->c_arena); - if (!starred) - return NULL; - asdl_seq_SET(args, nargs++, starred); - - } - else if (TYPE(chch) == DOUBLESTAR) { - /* a keyword argument unpacking */ - keyword_ty kw; - i++; - e = ast_for_expr(c, CHILD(ch, 1)); - 
if (!e) - return NULL; - kw = keyword(NULL, e, chch->n_lineno, chch->n_col_offset, - e->end_lineno, e->end_col_offset, c->c_arena); - asdl_seq_SET(keywords, nkeywords++, kw); - ndoublestars++; - } - else if (TYPE(CHILD(ch, 1)) == comp_for) { - /* the lone generator expression */ - e = copy_location(ast_for_genexp(c, ch), maybegenbeg, closepar); - if (!e) - return NULL; - asdl_seq_SET(args, nargs++, e); - } - else if (TYPE(CHILD(ch, 1)) == COLONEQUAL) { - /* treat colon equal as positional argument */ - if (nkeywords) { - if (ndoublestars) { - ast_error(c, chch, - "positional argument follows " - "keyword argument unpacking"); - } - else { - ast_error(c, chch, - "positional argument follows " - "keyword argument"); - } - return NULL; - } - e = ast_for_namedexpr(c, ch); - if (!e) - return NULL; - asdl_seq_SET(args, nargs++, e); - } - else { - /* a keyword argument */ - keyword_ty kw; - identifier key; - - // To remain LL(1), the grammar accepts any test (basically, any - // expression) in the keyword slot of a call site. So, we need - // to manually enforce that the keyword is a NAME here. - static const int name_tree[] = { - test, - or_test, - and_test, - not_test, - comparison, - expr, - xor_expr, - and_expr, - shift_expr, - arith_expr, - term, - factor, - power, - atom_expr, - atom, - 0, - }; - node *expr_node = chch; - for (int i = 0; name_tree[i]; i++) { - if (TYPE(expr_node) != name_tree[i]) - break; - if (NCH(expr_node) != 1) - break; - expr_node = CHILD(expr_node, 0); - } - if (TYPE(expr_node) != NAME) { - ast_error(c, chch, - "expression cannot contain assignment, " - "perhaps you meant \"==\"?"); - return NULL; - } - key = new_identifier(STR(expr_node), c); - if (key == NULL) { - return NULL; - } - if (forbidden_name(c, key, chch, 1)) { - return NULL; - } - e = ast_for_expr(c, CHILD(ch, 2)); - if (!e) - return NULL; - kw = keyword(key, e, chch->n_lineno, chch->n_col_offset, - e->end_lineno, e->end_col_offset, c->c_arena); - - if (!kw) - return NULL; - asdl_seq_SET(keywords, nkeywords++, kw); - } - } - } - - return Call(func, args, keywords, LINENO(start), start->n_col_offset, - closepar->n_end_lineno, closepar->n_end_col_offset, c->c_arena); -} - -static expr_ty -ast_for_testlist(struct compiling *c, const node* n) -{ - /* testlist_comp: test (comp_for | (',' test)* [',']) */ - /* testlist: test (',' test)* [','] */ - assert(NCH(n) > 0); - if (TYPE(n) == testlist_comp) { - if (NCH(n) > 1) - assert(TYPE(CHILD(n, 1)) != comp_for); - } - else { - assert(TYPE(n) == testlist || - TYPE(n) == testlist_star_expr); - } - if (NCH(n) == 1) - return ast_for_expr(c, CHILD(n, 0)); - else { - asdl_seq *tmp = seq_for_testlist(c, n); - if (!tmp) - return NULL; - return Tuple(tmp, Load, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } -} - -static stmt_ty -ast_for_expr_stmt(struct compiling *c, const node *n) -{ - REQ(n, expr_stmt); - /* expr_stmt: testlist_star_expr (annassign | augassign (yield_expr|testlist) | - [('=' (yield_expr|testlist_star_expr))+ [TYPE_COMMENT]] ) - annassign: ':' test ['=' (yield_expr|testlist)] - testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] - augassign: ('+=' | '-=' | '*=' | '@=' | '/=' | '%=' | '&=' | '|=' | '^=' | - '<<=' | '>>=' | '**=' | '//=') - test: ... 
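
As a rough illustration of the call-site rules enforced above (the unpacking-order checks and the "keyword must be a plain NAME" check), the sketch below probes a few call forms; the names f, args and kwargs are arbitrary placeholders, and newer parsers may word the messages differently than the ast_error() strings shown in the removed code:

    # Probe a few call forms; the rejected ones correspond to the
    # ast_error() calls in ast_for_call() above.
    for src in ("f(*args, **kwargs)",   # accepted: * unpacking before ** unpacking
                "f(**kwargs, *args)",   # iterable unpacking after ** unpacking
                "f(**kwargs, 1)",       # positional argument after ** unpacking
                "f(x.y=1)"):            # keyword target must be a plain NAME
        try:
            compile(src, "<demo>", "eval")
            print(src, "-> accepted")
        except SyntaxError as exc:
            print(src, "->", exc.msg)
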
here starts the operator precedence dance - */ - int num = NCH(n); - - if (num == 1) { - expr_ty e = ast_for_testlist(c, CHILD(n, 0)); - if (!e) - return NULL; - - return Expr(e, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else if (TYPE(CHILD(n, 1)) == augassign) { - expr_ty expr1, expr2; - operator_ty newoperator; - node *ch = CHILD(n, 0); - - expr1 = ast_for_testlist(c, ch); - if (!expr1) - return NULL; - /* Augmented assignments can only have a name, a subscript, or an - attribute on the left, though, so we have to explicitly check for - those. */ - switch (expr1->kind) { - case Name_kind: - case Attribute_kind: - case Subscript_kind: - break; - default: - ast_error(c, ch, "'%s' is an illegal expression for augmented assignment", - get_expr_name(expr1)); - return NULL; - } - - /* set_context checks that most expressions are not the left side. */ - if(!set_context(c, expr1, Store, ch)) { - return NULL; - } - - ch = CHILD(n, 2); - if (TYPE(ch) == testlist) - expr2 = ast_for_testlist(c, ch); - else - expr2 = ast_for_expr(c, ch); - if (!expr2) - return NULL; - - newoperator = ast_for_augassign(c, CHILD(n, 1)); - if (!newoperator) - return NULL; - - return AugAssign(expr1, newoperator, expr2, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else if (TYPE(CHILD(n, 1)) == annassign) { - expr_ty expr1, expr2, expr3; - node *ch = CHILD(n, 0); - node *deep, *ann = CHILD(n, 1); - int simple = 1; - - /* AnnAssigns are only allowed in Python 3.6 or greater */ - if (c->c_feature_version < 6) { - ast_error(c, ch, - "Variable annotation syntax is only supported in Python 3.6 and greater"); - return NULL; - } - - /* we keep track of parens to qualify (x) as expression not name */ - deep = ch; - while (NCH(deep) == 1) { - deep = CHILD(deep, 0); - } - if (NCH(deep) > 0 && TYPE(CHILD(deep, 0)) == LPAR) { - simple = 0; - } - expr1 = ast_for_testlist(c, ch); - if (!expr1) { - return NULL; - } - switch (expr1->kind) { - case Name_kind: - if (forbidden_name(c, expr1->v.Name.id, n, 0)) { - return NULL; - } - expr1->v.Name.ctx = Store; - break; - case Attribute_kind: - if (forbidden_name(c, expr1->v.Attribute.attr, n, 1)) { - return NULL; - } - expr1->v.Attribute.ctx = Store; - break; - case Subscript_kind: - expr1->v.Subscript.ctx = Store; - break; - case List_kind: - ast_error(c, ch, - "only single target (not list) can be annotated"); - return NULL; - case Tuple_kind: - ast_error(c, ch, - "only single target (not tuple) can be annotated"); - return NULL; - default: - ast_error(c, ch, - "illegal target for annotation"); - return NULL; - } - - if (expr1->kind != Name_kind) { - simple = 0; - } - ch = CHILD(ann, 1); - expr2 = ast_for_expr(c, ch); - if (!expr2) { - return NULL; - } - if (NCH(ann) == 2) { - return AnnAssign(expr1, expr2, NULL, simple, - LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else { - ch = CHILD(ann, 3); - if (TYPE(ch) == testlist_star_expr) { - expr3 = ast_for_testlist(c, ch); - } - else { - expr3 = ast_for_expr(c, ch); - } - if (!expr3) { - return NULL; - } - return AnnAssign(expr1, expr2, expr3, simple, - LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - } - else { - int i, nch_minus_type, has_type_comment; - asdl_seq *targets; - node *value; - expr_ty expression; - string type_comment; - - /* a normal assignment */ - REQ(CHILD(n, 1), EQUAL); - - has_type_comment = TYPE(CHILD(n, num - 1)) == TYPE_COMMENT; - nch_minus_type = num - 
has_type_comment; - - targets = _Py_asdl_seq_new(nch_minus_type / 2, c->c_arena); - if (!targets) - return NULL; - for (i = 0; i < nch_minus_type - 2; i += 2) { - expr_ty e; - node *ch = CHILD(n, i); - if (TYPE(ch) == yield_expr) { - ast_error(c, ch, "assignment to yield expression not possible"); - return NULL; - } - e = ast_for_testlist(c, ch); - if (!e) - return NULL; - - /* set context to assign */ - if (!set_context(c, e, Store, CHILD(n, i))) - return NULL; - - asdl_seq_SET(targets, i / 2, e); - } - value = CHILD(n, nch_minus_type - 1); - if (TYPE(value) == testlist_star_expr) - expression = ast_for_testlist(c, value); - else - expression = ast_for_expr(c, value); - if (!expression) - return NULL; - if (has_type_comment) { - type_comment = NEW_TYPE_COMMENT(CHILD(n, nch_minus_type)); - if (!type_comment) - return NULL; - } - else - type_comment = NULL; - return Assign(targets, expression, type_comment, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } -} - - -static asdl_seq * -ast_for_exprlist(struct compiling *c, const node *n, expr_context_ty context) -{ - asdl_seq *seq; - int i; - expr_ty e; - - REQ(n, exprlist); - - seq = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena); - if (!seq) - return NULL; - for (i = 0; i < NCH(n); i += 2) { - e = ast_for_expr(c, CHILD(n, i)); - if (!e) - return NULL; - asdl_seq_SET(seq, i / 2, e); - if (context && !set_context(c, e, context, CHILD(n, i))) - return NULL; - } - return seq; -} - -static stmt_ty -ast_for_del_stmt(struct compiling *c, const node *n) -{ - asdl_seq *expr_list; - - /* del_stmt: 'del' exprlist */ - REQ(n, del_stmt); - - expr_list = ast_for_exprlist(c, CHILD(n, 1), Del); - if (!expr_list) - return NULL; - return Delete(expr_list, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_flow_stmt(struct compiling *c, const node *n) -{ - /* - flow_stmt: break_stmt | continue_stmt | return_stmt | raise_stmt - | yield_stmt - break_stmt: 'break' - continue_stmt: 'continue' - return_stmt: 'return' [testlist] - yield_stmt: yield_expr - yield_expr: 'yield' testlist | 'yield' 'from' test - raise_stmt: 'raise' [test [',' test [',' test]]] - */ - node *ch; - - REQ(n, flow_stmt); - ch = CHILD(n, 0); - switch (TYPE(ch)) { - case break_stmt: - return Break(LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - case continue_stmt: - return Continue(LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - case yield_stmt: { /* will reduce to yield_expr */ - expr_ty exp = ast_for_expr(c, CHILD(ch, 0)); - if (!exp) - return NULL; - return Expr(exp, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - case return_stmt: - if (NCH(ch) == 1) - return Return(NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else { - expr_ty expression = ast_for_testlist(c, CHILD(ch, 1)); - if (!expression) - return NULL; - return Return(expression, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - case raise_stmt: - if (NCH(ch) == 1) - return Raise(NULL, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - else if (NCH(ch) >= 2) { - expr_ty cause = NULL; - expr_ty expression = ast_for_expr(c, CHILD(ch, 1)); - if (!expression) - return NULL; - if (NCH(ch) == 4) { - cause = ast_for_expr(c, CHILD(ch, 3)); - if (!cause) - return NULL; - } - return Raise(expression, cause, LINENO(n), 
n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - /* fall through */ - default: - PyErr_Format(PyExc_SystemError, - "unexpected flow_stmt: %d", TYPE(ch)); - return NULL; - } -} - -static alias_ty -alias_for_import_name(struct compiling *c, const node *n, int store) -{ - /* - import_as_name: NAME ['as' NAME] - dotted_as_name: dotted_name ['as' NAME] - dotted_name: NAME ('.' NAME)* - */ - identifier str, name; - - loop: - switch (TYPE(n)) { - case import_as_name: { - node *name_node = CHILD(n, 0); - str = NULL; - name = NEW_IDENTIFIER(name_node); - if (!name) - return NULL; - if (NCH(n) == 3) { - node *str_node = CHILD(n, 2); - str = NEW_IDENTIFIER(str_node); - if (!str) - return NULL; - if (store && forbidden_name(c, str, str_node, 0)) - return NULL; - } - else { - if (forbidden_name(c, name, name_node, 0)) - return NULL; - } - return alias(name, str, c->c_arena); - } - case dotted_as_name: - if (NCH(n) == 1) { - n = CHILD(n, 0); - goto loop; - } - else { - node *asname_node = CHILD(n, 2); - alias_ty a = alias_for_import_name(c, CHILD(n, 0), 0); - if (!a) - return NULL; - assert(!a->asname); - a->asname = NEW_IDENTIFIER(asname_node); - if (!a->asname) - return NULL; - if (forbidden_name(c, a->asname, asname_node, 0)) - return NULL; - return a; - } - case dotted_name: - if (NCH(n) == 1) { - node *name_node = CHILD(n, 0); - name = NEW_IDENTIFIER(name_node); - if (!name) - return NULL; - if (store && forbidden_name(c, name, name_node, 0)) - return NULL; - return alias(name, NULL, c->c_arena); - } - else { - /* Create a string of the form "a.b.c" */ - int i; - size_t len; - char *s; - PyObject *uni; - - len = 0; - for (i = 0; i < NCH(n); i += 2) - /* length of string plus one for the dot */ - len += strlen(STR(CHILD(n, i))) + 1; - len--; /* the last name doesn't have a dot */ - str = PyBytes_FromStringAndSize(NULL, len); - if (!str) - return NULL; - s = PyBytes_AS_STRING(str); - if (!s) - return NULL; - for (i = 0; i < NCH(n); i += 2) { - char *sch = STR(CHILD(n, i)); - strcpy(s, STR(CHILD(n, i))); - s += strlen(sch); - *s++ = '.'; - } - --s; - *s = '\0'; - uni = PyUnicode_DecodeUTF8(PyBytes_AS_STRING(str), - PyBytes_GET_SIZE(str), - NULL); - Py_DECREF(str); - if (!uni) - return NULL; - str = uni; - PyUnicode_InternInPlace(&str); - if (PyArena_AddPyObject(c->c_arena, str) < 0) { - Py_DECREF(str); - return NULL; - } - return alias(str, NULL, c->c_arena); - } - case STAR: - str = PyUnicode_InternFromString("*"); - if (!str) - return NULL; - if (PyArena_AddPyObject(c->c_arena, str) < 0) { - Py_DECREF(str); - return NULL; - } - return alias(str, NULL, c->c_arena); - default: - PyErr_Format(PyExc_SystemError, - "unexpected import name: %d", TYPE(n)); - return NULL; - } -} - -static stmt_ty -ast_for_import_stmt(struct compiling *c, const node *n) -{ - /* - import_stmt: import_name | import_from - import_name: 'import' dotted_as_names - import_from: 'from' (('.' | '...')* dotted_name | ('.' 
| '...')+) - 'import' ('*' | '(' import_as_names ')' | import_as_names) - */ - int lineno; - int col_offset; - int i; - asdl_seq *aliases; - - REQ(n, import_stmt); - lineno = LINENO(n); - col_offset = n->n_col_offset; - n = CHILD(n, 0); - if (TYPE(n) == import_name) { - n = CHILD(n, 1); - REQ(n, dotted_as_names); - aliases = _Py_asdl_seq_new((NCH(n) + 1) / 2, c->c_arena); - if (!aliases) - return NULL; - for (i = 0; i < NCH(n); i += 2) { - alias_ty import_alias = alias_for_import_name(c, CHILD(n, i), 1); - if (!import_alias) - return NULL; - asdl_seq_SET(aliases, i / 2, import_alias); - } - // Even though n is modified above, the end position is not changed - return Import(aliases, lineno, col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else if (TYPE(n) == import_from) { - int n_children; - int idx, ndots = 0; - const node *n_copy = n; - alias_ty mod = NULL; - identifier modname = NULL; - - /* Count the number of dots (for relative imports) and check for the - optional module name */ - for (idx = 1; idx < NCH(n); idx++) { - if (TYPE(CHILD(n, idx)) == dotted_name) { - mod = alias_for_import_name(c, CHILD(n, idx), 0); - if (!mod) - return NULL; - idx++; - break; - } else if (TYPE(CHILD(n, idx)) == ELLIPSIS) { - /* three consecutive dots are tokenized as one ELLIPSIS */ - ndots += 3; - continue; - } else if (TYPE(CHILD(n, idx)) != DOT) { - break; - } - ndots++; - } - idx++; /* skip over the 'import' keyword */ - switch (TYPE(CHILD(n, idx))) { - case STAR: - /* from ... import * */ - n = CHILD(n, idx); - n_children = 1; - break; - case LPAR: - /* from ... import (x, y, z) */ - n = CHILD(n, idx + 1); - n_children = NCH(n); - break; - case import_as_names: - /* from ... import x, y, z */ - n = CHILD(n, idx); - n_children = NCH(n); - if (n_children % 2 == 0) { - ast_error(c, n, - "trailing comma not allowed without" - " surrounding parentheses"); - return NULL; - } - break; - default: - ast_error(c, n, "Unexpected node-type in from-import"); - return NULL; - } - - aliases = _Py_asdl_seq_new((n_children + 1) / 2, c->c_arena); - if (!aliases) - return NULL; - - /* handle "from ... 
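
The dot-counting loop above treats a single ELLIPSIS token as three leading dots when computing the relative-import level; a minimal sketch of the resulting AST (pkg and name are placeholder identifiers):

    import ast
    tree = ast.parse("from ...pkg import name")
    node = tree.body[0]
    # prints: ImportFrom 3  -- the '...' token contributes three dots
    print(type(node).__name__, node.level)
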
import *" special b/c there's no children */ - if (TYPE(n) == STAR) { - alias_ty import_alias = alias_for_import_name(c, n, 1); - if (!import_alias) - return NULL; - asdl_seq_SET(aliases, 0, import_alias); - } - else { - for (i = 0; i < NCH(n); i += 2) { - alias_ty import_alias = alias_for_import_name(c, CHILD(n, i), 1); - if (!import_alias) - return NULL; - asdl_seq_SET(aliases, i / 2, import_alias); - } - } - if (mod != NULL) - modname = mod->name; - return ImportFrom(modname, aliases, ndots, lineno, col_offset, - n_copy->n_end_lineno, n_copy->n_end_col_offset, - c->c_arena); - } - PyErr_Format(PyExc_SystemError, - "unknown import statement: starts with command '%s'", - STR(CHILD(n, 0))); - return NULL; -} - -static stmt_ty -ast_for_global_stmt(struct compiling *c, const node *n) -{ - /* global_stmt: 'global' NAME (',' NAME)* */ - identifier name; - asdl_seq *s; - int i; - - REQ(n, global_stmt); - s = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena); - if (!s) - return NULL; - for (i = 1; i < NCH(n); i += 2) { - name = NEW_IDENTIFIER(CHILD(n, i)); - if (!name) - return NULL; - asdl_seq_SET(s, i / 2, name); - } - return Global(s, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_nonlocal_stmt(struct compiling *c, const node *n) -{ - /* nonlocal_stmt: 'nonlocal' NAME (',' NAME)* */ - identifier name; - asdl_seq *s; - int i; - - REQ(n, nonlocal_stmt); - s = _Py_asdl_seq_new(NCH(n) / 2, c->c_arena); - if (!s) - return NULL; - for (i = 1; i < NCH(n); i += 2) { - name = NEW_IDENTIFIER(CHILD(n, i)); - if (!name) - return NULL; - asdl_seq_SET(s, i / 2, name); - } - return Nonlocal(s, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_assert_stmt(struct compiling *c, const node *n) -{ - /* assert_stmt: 'assert' test [',' test] */ - REQ(n, assert_stmt); - if (NCH(n) == 2) { - expr_ty expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - return Assert(expression, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - else if (NCH(n) == 4) { - expr_ty expr1, expr2; - - expr1 = ast_for_expr(c, CHILD(n, 1)); - if (!expr1) - return NULL; - expr2 = ast_for_expr(c, CHILD(n, 3)); - if (!expr2) - return NULL; - - return Assert(expr1, expr2, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - PyErr_Format(PyExc_SystemError, - "improper number of parts to 'assert' statement: %d", - NCH(n)); - return NULL; -} - -static asdl_seq * -ast_for_suite(struct compiling *c, const node *n) -{ - /* suite: simple_stmt | NEWLINE [TYPE_COMMENT NEWLINE] INDENT stmt+ DEDENT */ - asdl_seq *seq; - stmt_ty s; - int i, total, num, end, pos = 0; - node *ch; - - if (TYPE(n) != func_body_suite) { - REQ(n, suite); - } - - total = num_stmts(n); - seq = _Py_asdl_seq_new(total, c->c_arena); - if (!seq) - return NULL; - if (TYPE(CHILD(n, 0)) == simple_stmt) { - n = CHILD(n, 0); - /* simple_stmt always ends with a NEWLINE, - and may have a trailing SEMI - */ - end = NCH(n) - 1; - if (TYPE(CHILD(n, end - 1)) == SEMI) - end--; - /* loop by 2 to skip semi-colons */ - for (i = 0; i < end; i += 2) { - ch = CHILD(n, i); - s = ast_for_stmt(c, ch); - if (!s) - return NULL; - asdl_seq_SET(seq, pos++, s); - } - } - else { - i = 2; - if (TYPE(CHILD(n, 1)) == TYPE_COMMENT) { - i += 2; - REQ(CHILD(n, 2), NEWLINE); - } - - for (; i < (NCH(n) - 1); i++) { - ch = CHILD(n, i); - REQ(ch, stmt); - num = num_stmts(ch); - if (num == 1) { - /* 
small_stmt or compound_stmt with only one child */ - s = ast_for_stmt(c, ch); - if (!s) - return NULL; - asdl_seq_SET(seq, pos++, s); - } - else { - int j; - ch = CHILD(ch, 0); - REQ(ch, simple_stmt); - for (j = 0; j < NCH(ch); j += 2) { - /* statement terminates with a semi-colon ';' */ - if (NCH(CHILD(ch, j)) == 0) { - assert((j + 1) == NCH(ch)); - break; - } - s = ast_for_stmt(c, CHILD(ch, j)); - if (!s) - return NULL; - asdl_seq_SET(seq, pos++, s); - } - } - } - } - assert(pos == seq->size); - return seq; -} - -static void -get_last_end_pos(asdl_seq *s, int *end_lineno, int *end_col_offset) -{ - Py_ssize_t tot = asdl_seq_LEN(s); - // There must be no empty suites. - assert(tot > 0); - stmt_ty last = asdl_seq_GET(s, tot - 1); - *end_lineno = last->end_lineno; - *end_col_offset = last->end_col_offset; -} - -static stmt_ty -ast_for_if_stmt(struct compiling *c, const node *n) -{ - /* if_stmt: 'if' test ':' suite ('elif' test ':' suite)* - ['else' ':' suite] - */ - char *s; - int end_lineno, end_col_offset; - - REQ(n, if_stmt); - - if (NCH(n) == 4) { - expr_ty expression; - asdl_seq *suite_seq; - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, 3)); - if (!suite_seq) - return NULL; - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - - return If(expression, suite_seq, NULL, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - s = STR(CHILD(n, 4)); - /* s[2], the third character in the string, will be - 's' for el_s_e, or - 'i' for el_i_f - */ - if (s[2] == 's') { - expr_ty expression; - asdl_seq *seq1, *seq2; - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - seq1 = ast_for_suite(c, CHILD(n, 3)); - if (!seq1) - return NULL; - seq2 = ast_for_suite(c, CHILD(n, 6)); - if (!seq2) - return NULL; - get_last_end_pos(seq2, &end_lineno, &end_col_offset); - - return If(expression, seq1, seq2, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - else if (s[2] == 'i') { - int i, n_elif, has_else = 0; - expr_ty expression; - asdl_seq *suite_seq; - asdl_seq *orelse = NULL; - n_elif = NCH(n) - 4; - /* must reference the child n_elif+1 since 'else' token is third, - not fourth, child from the end. 
*/ - if (TYPE(CHILD(n, (n_elif + 1))) == NAME - && STR(CHILD(n, (n_elif + 1)))[2] == 's') { - has_else = 1; - n_elif -= 3; - } - n_elif /= 4; - - if (has_else) { - asdl_seq *suite_seq2; - - orelse = _Py_asdl_seq_new(1, c->c_arena); - if (!orelse) - return NULL; - expression = ast_for_expr(c, CHILD(n, NCH(n) - 6)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, NCH(n) - 4)); - if (!suite_seq) - return NULL; - suite_seq2 = ast_for_suite(c, CHILD(n, NCH(n) - 1)); - if (!suite_seq2) - return NULL; - get_last_end_pos(suite_seq2, &end_lineno, &end_col_offset); - - asdl_seq_SET(orelse, 0, - If(expression, suite_seq, suite_seq2, - LINENO(CHILD(n, NCH(n) - 7)), - CHILD(n, NCH(n) - 7)->n_col_offset, - end_lineno, end_col_offset, c->c_arena)); - /* the just-created orelse handled the last elif */ - n_elif--; - } - - for (i = 0; i < n_elif; i++) { - int off = 5 + (n_elif - i - 1) * 4; - asdl_seq *newobj = _Py_asdl_seq_new(1, c->c_arena); - if (!newobj) - return NULL; - expression = ast_for_expr(c, CHILD(n, off)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, off + 2)); - if (!suite_seq) - return NULL; - - if (orelse != NULL) { - get_last_end_pos(orelse, &end_lineno, &end_col_offset); - } else { - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - } - asdl_seq_SET(newobj, 0, - If(expression, suite_seq, orelse, - LINENO(CHILD(n, off - 1)), - CHILD(n, off - 1)->n_col_offset, - end_lineno, end_col_offset, c->c_arena)); - orelse = newobj; - } - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, 3)); - if (!suite_seq) - return NULL; - get_last_end_pos(orelse, &end_lineno, &end_col_offset); - return If(expression, suite_seq, orelse, - LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - PyErr_Format(PyExc_SystemError, - "unexpected token in 'if' statement: %s", s); - return NULL; -} - -static stmt_ty -ast_for_while_stmt(struct compiling *c, const node *n) -{ - /* while_stmt: 'while' test ':' suite ['else' ':' suite] */ - REQ(n, while_stmt); - int end_lineno, end_col_offset; - - if (NCH(n) == 4) { - expr_ty expression; - asdl_seq *suite_seq; - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, 3)); - if (!suite_seq) - return NULL; - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - return While(expression, suite_seq, NULL, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - else if (NCH(n) == 7) { - expr_ty expression; - asdl_seq *seq1, *seq2; - - expression = ast_for_expr(c, CHILD(n, 1)); - if (!expression) - return NULL; - seq1 = ast_for_suite(c, CHILD(n, 3)); - if (!seq1) - return NULL; - seq2 = ast_for_suite(c, CHILD(n, 6)); - if (!seq2) - return NULL; - get_last_end_pos(seq2, &end_lineno, &end_col_offset); - - return While(expression, seq1, seq2, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - PyErr_Format(PyExc_SystemError, - "wrong number of tokens for 'while' statement: %d", - NCH(n)); - return NULL; -} - -static stmt_ty -ast_for_for_stmt(struct compiling *c, const node *n0, bool is_async) -{ - const node * const n = is_async ? 
CHILD(n0, 1) : n0; - asdl_seq *_target, *seq = NULL, *suite_seq; - expr_ty expression; - expr_ty target, first; - const node *node_target; - int end_lineno, end_col_offset; - int has_type_comment; - string type_comment; - - if (is_async && c->c_feature_version < 5) { - ast_error(c, n, - "Async for loops are only supported in Python 3.5 and greater"); - return NULL; - } - - /* for_stmt: 'for' exprlist 'in' testlist ':' [TYPE_COMMENT] suite ['else' ':' suite] */ - REQ(n, for_stmt); - - has_type_comment = TYPE(CHILD(n, 5)) == TYPE_COMMENT; - - if (NCH(n) == 9 + has_type_comment) { - seq = ast_for_suite(c, CHILD(n, 8 + has_type_comment)); - if (!seq) - return NULL; - } - - node_target = CHILD(n, 1); - _target = ast_for_exprlist(c, node_target, Store); - if (!_target) - return NULL; - /* Check the # of children rather than the length of _target, since - for x, in ... has 1 element in _target, but still requires a Tuple. */ - first = (expr_ty)asdl_seq_GET(_target, 0); - if (NCH(node_target) == 1) - target = first; - else - target = Tuple(_target, Store, first->lineno, first->col_offset, - node_target->n_end_lineno, node_target->n_end_col_offset, - c->c_arena); - - expression = ast_for_testlist(c, CHILD(n, 3)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, CHILD(n, 5 + has_type_comment)); - if (!suite_seq) - return NULL; - - if (seq != NULL) { - get_last_end_pos(seq, &end_lineno, &end_col_offset); - } else { - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - } - - if (has_type_comment) { - type_comment = NEW_TYPE_COMMENT(CHILD(n, 5)); - if (!type_comment) - return NULL; - } - else - type_comment = NULL; - - if (is_async) - return AsyncFor(target, expression, suite_seq, seq, type_comment, - LINENO(n0), n0->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - else - return For(target, expression, suite_seq, seq, type_comment, - LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); -} - -static excepthandler_ty -ast_for_except_clause(struct compiling *c, const node *exc, node *body) -{ - /* except_clause: 'except' [test ['as' test]] */ - int end_lineno, end_col_offset; - REQ(exc, except_clause); - REQ(body, suite); - - if (NCH(exc) == 1) { - asdl_seq *suite_seq = ast_for_suite(c, body); - if (!suite_seq) - return NULL; - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - - return ExceptHandler(NULL, NULL, suite_seq, LINENO(exc), - exc->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - else if (NCH(exc) == 2) { - expr_ty expression; - asdl_seq *suite_seq; - - expression = ast_for_expr(c, CHILD(exc, 1)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, body); - if (!suite_seq) - return NULL; - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - - return ExceptHandler(expression, NULL, suite_seq, LINENO(exc), - exc->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - else if (NCH(exc) == 4) { - asdl_seq *suite_seq; - expr_ty expression; - identifier e = NEW_IDENTIFIER(CHILD(exc, 3)); - if (!e) - return NULL; - if (forbidden_name(c, e, CHILD(exc, 3), 0)) - return NULL; - expression = ast_for_expr(c, CHILD(exc, 1)); - if (!expression) - return NULL; - suite_seq = ast_for_suite(c, body); - if (!suite_seq) - return NULL; - get_last_end_pos(suite_seq, &end_lineno, &end_col_offset); - - return ExceptHandler(expression, e, suite_seq, LINENO(exc), - exc->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - PyErr_Format(PyExc_SystemError, - "wrong number of children for 'except' clause: 
%d", - NCH(exc)); - return NULL; -} - -static stmt_ty -ast_for_try_stmt(struct compiling *c, const node *n) -{ - const int nch = NCH(n); - int end_lineno, end_col_offset, n_except = (nch - 3)/3; - asdl_seq *body, *handlers = NULL, *orelse = NULL, *finally = NULL; - excepthandler_ty last_handler; - - REQ(n, try_stmt); - - body = ast_for_suite(c, CHILD(n, 2)); - if (body == NULL) - return NULL; - - if (TYPE(CHILD(n, nch - 3)) == NAME) { - if (strcmp(STR(CHILD(n, nch - 3)), "finally") == 0) { - if (nch >= 9 && TYPE(CHILD(n, nch - 6)) == NAME) { - /* we can assume it's an "else", - because nch >= 9 for try-else-finally and - it would otherwise have a type of except_clause */ - orelse = ast_for_suite(c, CHILD(n, nch - 4)); - if (orelse == NULL) - return NULL; - n_except--; - } - - finally = ast_for_suite(c, CHILD(n, nch - 1)); - if (finally == NULL) - return NULL; - n_except--; - } - else { - /* we can assume it's an "else", - otherwise it would have a type of except_clause */ - orelse = ast_for_suite(c, CHILD(n, nch - 1)); - if (orelse == NULL) - return NULL; - n_except--; - } - } - else if (TYPE(CHILD(n, nch - 3)) != except_clause) { - ast_error(c, n, "malformed 'try' statement"); - return NULL; - } - - if (n_except > 0) { - int i; - /* process except statements to create a try ... except */ - handlers = _Py_asdl_seq_new(n_except, c->c_arena); - if (handlers == NULL) - return NULL; - - for (i = 0; i < n_except; i++) { - excepthandler_ty e = ast_for_except_clause(c, CHILD(n, 3 + i * 3), - CHILD(n, 5 + i * 3)); - if (!e) - return NULL; - asdl_seq_SET(handlers, i, e); - } - } - - assert(finally != NULL || asdl_seq_LEN(handlers)); - if (finally != NULL) { - // finally is always last - get_last_end_pos(finally, &end_lineno, &end_col_offset); - } else if (orelse != NULL) { - // otherwise else is last - get_last_end_pos(orelse, &end_lineno, &end_col_offset); - } else { - // inline the get_last_end_pos logic due to layout mismatch - last_handler = (excepthandler_ty) asdl_seq_GET(handlers, n_except - 1); - end_lineno = last_handler->end_lineno; - end_col_offset = last_handler->end_col_offset; - } - return Try(body, handlers, orelse, finally, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); -} - -/* with_item: test ['as' expr] */ -static withitem_ty -ast_for_with_item(struct compiling *c, const node *n) -{ - expr_ty context_expr, optional_vars = NULL; - - REQ(n, with_item); - context_expr = ast_for_expr(c, CHILD(n, 0)); - if (!context_expr) - return NULL; - if (NCH(n) == 3) { - optional_vars = ast_for_expr(c, CHILD(n, 2)); - - if (!optional_vars) { - return NULL; - } - if (!set_context(c, optional_vars, Store, n)) { - return NULL; - } - } - - return withitem(context_expr, optional_vars, c->c_arena); -} - -/* with_stmt: 'with' with_item (',' with_item)* ':' [TYPE_COMMENT] suite */ -static stmt_ty -ast_for_with_stmt(struct compiling *c, const node *n0, bool is_async) -{ - const node * const n = is_async ? 
CHILD(n0, 1) : n0; - int i, n_items, nch_minus_type, has_type_comment, end_lineno, end_col_offset; - asdl_seq *items, *body; - string type_comment; - - if (is_async && c->c_feature_version < 5) { - ast_error(c, n, - "Async with statements are only supported in Python 3.5 and greater"); - return NULL; - } - - REQ(n, with_stmt); - - has_type_comment = TYPE(CHILD(n, NCH(n) - 2)) == TYPE_COMMENT; - nch_minus_type = NCH(n) - has_type_comment; - - n_items = (nch_minus_type - 2) / 2; - items = _Py_asdl_seq_new(n_items, c->c_arena); - if (!items) - return NULL; - for (i = 1; i < nch_minus_type - 2; i += 2) { - withitem_ty item = ast_for_with_item(c, CHILD(n, i)); - if (!item) - return NULL; - asdl_seq_SET(items, (i - 1) / 2, item); - } - - body = ast_for_suite(c, CHILD(n, NCH(n) - 1)); - if (!body) - return NULL; - get_last_end_pos(body, &end_lineno, &end_col_offset); - - if (has_type_comment) { - type_comment = NEW_TYPE_COMMENT(CHILD(n, NCH(n) - 2)); - if (!type_comment) - return NULL; - } - else - type_comment = NULL; - - if (is_async) - return AsyncWith(items, body, type_comment, LINENO(n0), n0->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - else - return With(items, body, type_comment, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_classdef(struct compiling *c, const node *n, asdl_seq *decorator_seq) -{ - /* classdef: 'class' NAME ['(' arglist ')'] ':' suite */ - PyObject *classname; - asdl_seq *s; - expr_ty call; - int end_lineno, end_col_offset; - - REQ(n, classdef); - - if (NCH(n) == 4) { /* class NAME ':' suite */ - s = ast_for_suite(c, CHILD(n, 3)); - if (!s) - return NULL; - get_last_end_pos(s, &end_lineno, &end_col_offset); - - classname = NEW_IDENTIFIER(CHILD(n, 1)); - if (!classname) - return NULL; - if (forbidden_name(c, classname, CHILD(n, 3), 0)) - return NULL; - return ClassDef(classname, NULL, NULL, s, decorator_seq, - LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - if (TYPE(CHILD(n, 3)) == RPAR) { /* class NAME '(' ')' ':' suite */ - s = ast_for_suite(c, CHILD(n, 5)); - if (!s) - return NULL; - get_last_end_pos(s, &end_lineno, &end_col_offset); - - classname = NEW_IDENTIFIER(CHILD(n, 1)); - if (!classname) - return NULL; - if (forbidden_name(c, classname, CHILD(n, 3), 0)) - return NULL; - return ClassDef(classname, NULL, NULL, s, decorator_seq, - LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); - } - - /* class NAME '(' arglist ')' ':' suite */ - /* build up a fake Call node so we can extract its pieces */ - { - PyObject *dummy_name; - expr_ty dummy; - dummy_name = NEW_IDENTIFIER(CHILD(n, 1)); - if (!dummy_name) - return NULL; - dummy = Name(dummy_name, Load, LINENO(n), n->n_col_offset, - CHILD(n, 1)->n_end_lineno, CHILD(n, 1)->n_end_col_offset, - c->c_arena); - call = ast_for_call(c, CHILD(n, 3), dummy, - CHILD(n, 1), NULL, CHILD(n, 4)); - if (!call) - return NULL; - } - s = ast_for_suite(c, CHILD(n, 6)); - if (!s) - return NULL; - get_last_end_pos(s, &end_lineno, &end_col_offset); - - classname = NEW_IDENTIFIER(CHILD(n, 1)); - if (!classname) - return NULL; - if (forbidden_name(c, classname, CHILD(n, 1), 0)) - return NULL; - - return ClassDef(classname, call->v.Call.args, call->v.Call.keywords, s, - decorator_seq, LINENO(n), n->n_col_offset, - end_lineno, end_col_offset, c->c_arena); -} - -static stmt_ty -ast_for_stmt(struct compiling *c, const node *n) -{ - if (TYPE(n) == stmt) { - assert(NCH(n) == 1); - n = CHILD(n, 0); - } - if (TYPE(n) == simple_stmt) { - 
assert(num_stmts(n) == 1); - n = CHILD(n, 0); - } - if (TYPE(n) == small_stmt) { - n = CHILD(n, 0); - /* small_stmt: expr_stmt | del_stmt | pass_stmt | flow_stmt - | import_stmt | global_stmt | nonlocal_stmt | assert_stmt - */ - switch (TYPE(n)) { - case expr_stmt: - return ast_for_expr_stmt(c, n); - case del_stmt: - return ast_for_del_stmt(c, n); - case pass_stmt: - return Pass(LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - case flow_stmt: - return ast_for_flow_stmt(c, n); - case import_stmt: - return ast_for_import_stmt(c, n); - case global_stmt: - return ast_for_global_stmt(c, n); - case nonlocal_stmt: - return ast_for_nonlocal_stmt(c, n); - case assert_stmt: - return ast_for_assert_stmt(c, n); - default: - PyErr_Format(PyExc_SystemError, - "unhandled small_stmt: TYPE=%d NCH=%d\n", - TYPE(n), NCH(n)); - return NULL; - } - } - else { - /* compound_stmt: if_stmt | while_stmt | for_stmt | try_stmt - | funcdef | classdef | decorated | async_stmt - */ - node *ch = CHILD(n, 0); - REQ(n, compound_stmt); - switch (TYPE(ch)) { - case if_stmt: - return ast_for_if_stmt(c, ch); - case while_stmt: - return ast_for_while_stmt(c, ch); - case for_stmt: - return ast_for_for_stmt(c, ch, 0); - case try_stmt: - return ast_for_try_stmt(c, ch); - case with_stmt: - return ast_for_with_stmt(c, ch, 0); - case funcdef: - return ast_for_funcdef(c, ch, NULL); - case classdef: - return ast_for_classdef(c, ch, NULL); - case decorated: - return ast_for_decorated(c, ch); - case async_stmt: - return ast_for_async_stmt(c, ch); - default: - PyErr_Format(PyExc_SystemError, - "unhandled compound_stmt: TYPE=%d NCH=%d\n", - TYPE(n), NCH(n)); - return NULL; - } - } -} - -static PyObject * -parsenumber_raw(struct compiling *c, const char *s) -{ - const char *end; - long x; - double dx; - Py_complex compl; - int imflag; - - assert(s != NULL); - errno = 0; - end = s + strlen(s) - 1; - imflag = *end == 'j' || *end == 'J'; - if (s[0] == '0') { - x = (long) PyOS_strtoul(s, (char **)&end, 0); - if (x < 0 && errno == 0) { - return PyLong_FromString(s, (char **)0, 0); - } - } - else - x = PyOS_strtol(s, (char **)&end, 0); - if (*end == '\0') { - if (errno != 0) - return PyLong_FromString(s, (char **)0, 0); - return PyLong_FromLong(x); - } - /* XXX Huge floats may silently fail */ - if (imflag) { - compl.real = 0.; - compl.imag = PyOS_string_to_double(s, (char **)&end, NULL); - if (compl.imag == -1.0 && PyErr_Occurred()) - return NULL; - return PyComplex_FromCComplex(compl); - } - else - { - dx = PyOS_string_to_double(s, NULL, NULL); - if (dx == -1.0 && PyErr_Occurred()) - return NULL; - return PyFloat_FromDouble(dx); - } -} - -static PyObject * -parsenumber(struct compiling *c, const char *s) -{ - char *dup, *end; - PyObject *res = NULL; - - assert(s != NULL); - - if (strchr(s, '_') == NULL) { - return parsenumber_raw(c, s); - } - /* Create a duplicate without underscores. */ - dup = PyMem_Malloc(strlen(s) + 1); - if (dup == NULL) { - return PyErr_NoMemory(); - } - end = dup; - for (; *s; s++) { - if (*s != '_') { - *end++ = *s; - } - } - *end = '\0'; - res = parsenumber_raw(c, dup); - PyMem_Free(dup); - return res; -} - -static PyObject * -decode_utf8(struct compiling *c, const char **sPtr, const char *end) -{ - const char *s, *t; - t = s = *sPtr; - /* while (s < end && *s != '\\') s++; */ /* inefficient for u".." 
*/ - while (s < end && (*s & 0x80)) s++; - *sPtr = s; - return PyUnicode_DecodeUTF8(t, s - t, NULL); -} - -static int -warn_invalid_escape_sequence(struct compiling *c, const node *n, - unsigned char first_invalid_escape_char) -{ - PyObject *msg = PyUnicode_FromFormat("invalid escape sequence \\%c", - first_invalid_escape_char); - if (msg == NULL) { - return -1; - } - if (PyErr_WarnExplicitObject(PyExc_DeprecationWarning, msg, - c->c_filename, LINENO(n), - NULL, NULL) < 0) - { - if (PyErr_ExceptionMatches(PyExc_DeprecationWarning)) { - /* Replace the DeprecationWarning exception with a SyntaxError - to get a more accurate error report */ - PyErr_Clear(); - ast_error(c, n, "%U", msg); - } - Py_DECREF(msg); - return -1; - } - Py_DECREF(msg); - return 0; -} - -static PyObject * -decode_unicode_with_escapes(struct compiling *c, const node *n, const char *s, - size_t len) -{ - PyObject *v, *u; - char *buf; - char *p; - const char *end; - - /* check for integer overflow */ - if (len > SIZE_MAX / 6) - return NULL; - /* "?" (2 bytes) may become "\U000000E4" (10 bytes), or 1:5 - "\?" (3 bytes) may become "\u005c\U000000E4" (16 bytes), or ~1:6 */ - u = PyBytes_FromStringAndSize((char *)NULL, len * 6); - if (u == NULL) - return NULL; - p = buf = PyBytes_AsString(u); - end = s + len; - while (s < end) { - if (*s == '\\') { - *p++ = *s++; - if (s >= end || *s & 0x80) { - strcpy(p, "u005c"); - p += 5; - if (s >= end) - break; - } - } - if (*s & 0x80) { /* XXX inefficient */ - PyObject *w; - int kind; - const void *data; - Py_ssize_t len, i; - w = decode_utf8(c, &s, end); - if (w == NULL) { - Py_DECREF(u); - return NULL; - } - kind = PyUnicode_KIND(w); - data = PyUnicode_DATA(w); - len = PyUnicode_GET_LENGTH(w); - for (i = 0; i < len; i++) { - Py_UCS4 chr = PyUnicode_READ(kind, data, i); - sprintf(p, "\\U%08x", chr); - p += 10; - } - /* Should be impossible to overflow */ - assert(p - buf <= PyBytes_GET_SIZE(u)); - Py_DECREF(w); - } else { - *p++ = *s++; - } - } - len = p - buf; - s = buf; - - const char *first_invalid_escape; - v = _PyUnicode_DecodeUnicodeEscape(s, len, NULL, &first_invalid_escape); - - if (v != NULL && first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(c, n, *first_invalid_escape) < 0) { - /* We have not decref u before because first_invalid_escape points - inside u. */ - Py_XDECREF(u); - Py_DECREF(v); - return NULL; - } - } - Py_XDECREF(u); - return v; -} - -static PyObject * -decode_bytes_with_escapes(struct compiling *c, const node *n, const char *s, - size_t len) -{ - const char *first_invalid_escape; - PyObject *result = _PyBytes_DecodeEscape(s, len, NULL, - &first_invalid_escape); - if (result == NULL) - return NULL; - - if (first_invalid_escape != NULL) { - if (warn_invalid_escape_sequence(c, n, *first_invalid_escape) < 0) { - Py_DECREF(result); - return NULL; - } - } - return result; -} - -/* Shift locations for the given node and all its children by adding `lineno` - and `col_offset` to existing locations. */ -static void fstring_shift_node_locations(node *n, int lineno, int col_offset) -{ - n->n_col_offset = n->n_col_offset + col_offset; - n->n_end_col_offset = n->n_end_col_offset + col_offset; - for (int i = 0; i < NCH(n); ++i) { - if (n->n_lineno && n->n_lineno < CHILD(n, i)->n_lineno) { - /* Shifting column offsets unnecessary if there's been newlines. 
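
For warn_invalid_escape_sequence() above, a minimal Python-level sketch of the observable behaviour; '\d' is just one example of an unrecognized escape, and at the time of this code the category is DeprecationWarning:

    import warnings
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter("always")
        compile("'\\d'", "<demo>", "eval")   # source text contains the escape \d
    # e.g. DeprecationWarning: invalid escape sequence \d
    print(caught[0].category.__name__, caught[0].message)
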
*/ - col_offset = 0; - } - fstring_shift_node_locations(CHILD(n, i), lineno, col_offset); - } - n->n_lineno = n->n_lineno + lineno; - n->n_end_lineno = n->n_end_lineno + lineno; -} - -/* Fix locations for the given node and its children. - - `parent` is the enclosing node. - `n` is the node which locations are going to be fixed relative to parent. - `expr_str` is the child node's string representation, including braces. -*/ -static void -fstring_fix_node_location(const node *parent, node *n, char *expr_str) -{ - char *substr = NULL; - char *start; - int lines = LINENO(parent) - 1; - int cols = parent->n_col_offset; - /* Find the full fstring to fix location information in `n`. */ - while (parent && parent->n_type != STRING) - parent = parent->n_child; - if (parent && parent->n_str) { - substr = strstr(parent->n_str, expr_str); - if (substr) { - start = substr; - while (start > parent->n_str) { - if (start[0] == '\n') - break; - start--; - } - cols += (int)(substr - start); - /* adjust the start based on the number of newlines encountered - before the f-string expression */ - for (char* p = parent->n_str; p < substr; p++) { - if (*p == '\n') { - lines++; - } - } - } - } - fstring_shift_node_locations(n, lines, cols); -} - -/* Compile this expression in to an expr_ty. Add parens around the - expression, in order to allow leading spaces in the expression. */ -static expr_ty -fstring_compile_expr(const char *expr_start, const char *expr_end, - struct compiling *c, const node *n) - -{ - node *mod_n; - mod_ty mod; - char *str; - Py_ssize_t len; - const char *s; - - assert(expr_end >= expr_start); - assert(*(expr_start-1) == '{'); - assert(*expr_end == '}' || *expr_end == '!' || *expr_end == ':' || - *expr_end == '='); - - /* If the substring is all whitespace, it's an error. We need to catch this - here, and not when we call PyParser_SimpleParseStringFlagsFilename, - because turning the expression '' in to '()' would go from being invalid - to valid. */ - for (s = expr_start; s != expr_end; s++) { - char c = *s; - /* The Python parser ignores only the following whitespace - characters (\r already is converted to \n). */ - if (!(c == ' ' || c == '\t' || c == '\n' || c == '\f')) { - break; - } - } - if (s == expr_end) { - ast_error(c, n, "f-string: empty expression not allowed"); - return NULL; - } - - len = expr_end - expr_start; - /* Allocate 3 extra bytes: open paren, close paren, null byte. */ - str = PyMem_RawMalloc(len + 3); - if (str == NULL) { - PyErr_NoMemory(); - return NULL; - } - - str[0] = '('; - memcpy(str+1, expr_start, len); - str[len+1] = ')'; - str[len+2] = 0; - - PyCompilerFlags cf = _PyCompilerFlags_INIT; - cf.cf_flags = PyCF_ONLY_AST; - mod_n = PyParser_SimpleParseStringFlagsFilename(str, "", - Py_eval_input, 0); - if (!mod_n) { - PyMem_RawFree(str); - return NULL; - } - /* Reuse str to find the correct column offset. */ - str[0] = '{'; - str[len+1] = '}'; - fstring_fix_node_location(n, mod_n, str); - mod = PyAST_FromNode(mod_n, &cf, "", c->c_arena); - PyMem_RawFree(str); - PyNode_Free(mod_n); - if (!mod) - return NULL; - return mod->v.Expression.body; -} - -/* Return -1 on error. - - Return 0 if we reached the end of the literal. - - Return 1 if we haven't reached the end of the literal, but we want - the caller to process the literal up to this point. Used for - doubled braces. -*/ -static int -fstring_find_literal(const char **str, const char *end, int raw, - PyObject **literal, int recurse_lvl, - struct compiling *c, const node *n) -{ - /* Get any literal string. 
It ends when we hit an un-doubled left - brace (which isn't part of a unicode name escape such as - "\N{EULER CONSTANT}"), or the end of the string. */ - - const char *s = *str; - const char *literal_start = s; - int result = 0; - - assert(*literal == NULL); - while (s < end) { - char ch = *s++; - if (!raw && ch == '\\' && s < end) { - ch = *s++; - if (ch == 'N') { - if (s < end && *s++ == '{') { - while (s < end && *s++ != '}') { - } - continue; - } - break; - } - if (ch == '{' && warn_invalid_escape_sequence(c, n, ch) < 0) { - return -1; - } - } - if (ch == '{' || ch == '}') { - /* Check for doubled braces, but only at the top level. If - we checked at every level, then f'{0:{3}}' would fail - with the two closing braces. */ - if (recurse_lvl == 0) { - if (s < end && *s == ch) { - /* We're going to tell the caller that the literal ends - here, but that they should continue scanning. But also - skip over the second brace when we resume scanning. */ - *str = s + 1; - result = 1; - goto done; - } - - /* Where a single '{' is the start of a new expression, a - single '}' is not allowed. */ - if (ch == '}') { - *str = s - 1; - ast_error(c, n, "f-string: single '}' is not allowed"); - return -1; - } - } - /* We're either at a '{', which means we're starting another - expression; or a '}', which means we're at the end of this - f-string (for a nested format_spec). */ - s--; - break; - } - } - *str = s; - assert(s <= end); - assert(s == end || *s == '{' || *s == '}'); -done: - if (literal_start != s) { - if (raw) - *literal = PyUnicode_DecodeUTF8Stateful(literal_start, - s - literal_start, - NULL, NULL); - else - *literal = decode_unicode_with_escapes(c, n, literal_start, - s - literal_start); - if (!*literal) - return -1; - } - return result; -} - -/* Forward declaration because parsing is recursive. */ -static expr_ty -fstring_parse(const char **str, const char *end, int raw, int recurse_lvl, - struct compiling *c, const node *n); - -/* Parse the f-string at *str, ending at end. We know *str starts an - expression (so it must be a '{'). Returns the FormattedValue node, which - includes the expression, conversion character, format_spec expression, and - optionally the text of the expression (if = is used). - - Note that I don't do a perfect job here: I don't make sure that a - closing brace doesn't match an opening paren, for example. It - doesn't need to error on all invalid expressions, just correctly - find the end of all valid ones. Any errors inside the expression - will be caught when we parse it later. - - *expression is set to the expression. For an '=' "debug" expression, - *expr_text is set to the debug text (the original text of the expression, - including the '=' and any whitespace around it, as a string object). If - not a debug expression, *expr_text set to NULL. */ -static int -fstring_find_expr(const char **str, const char *end, int raw, int recurse_lvl, - PyObject **expr_text, expr_ty *expression, - struct compiling *c, const node *n) -{ - /* Return -1 on error, else 0. */ - - const char *expr_start; - const char *expr_end; - expr_ty simple_expression; - expr_ty format_spec = NULL; /* Optional format specifier. */ - int conversion = -1; /* The conversion char. Use default if not - specified, or !r if using = and no format - spec. */ - - /* 0 if we're not in a string, else the quote char we're trying to - match (single or double quote). */ - char quote_char = 0; - - /* If we're inside a string, 1=normal, 3=triple-quoted. 
*/ - int string_type = 0; - - /* Keep track of nesting level for braces/parens/brackets in - expressions. */ - Py_ssize_t nested_depth = 0; - char parenstack[MAXLEVEL]; - - *expr_text = NULL; - - /* Can only nest one level deep. */ - if (recurse_lvl >= 2) { - ast_error(c, n, "f-string: expressions nested too deeply"); - goto error; - } - - /* The first char must be a left brace, or we wouldn't have gotten - here. Skip over it. */ - assert(**str == '{'); - *str += 1; - - expr_start = *str; - for (; *str < end; (*str)++) { - char ch; - - /* Loop invariants. */ - assert(nested_depth >= 0); - assert(*str >= expr_start && *str < end); - if (quote_char) - assert(string_type == 1 || string_type == 3); - else - assert(string_type == 0); - - ch = **str; - /* Nowhere inside an expression is a backslash allowed. */ - if (ch == '\\') { - /* Error: can't include a backslash character, inside - parens or strings or not. */ - ast_error(c, n, - "f-string expression part " - "cannot include a backslash"); - goto error; - } - if (quote_char) { - /* We're inside a string. See if we're at the end. */ - /* This code needs to implement the same non-error logic - as tok_get from tokenizer.c, at the letter_quote - label. To actually share that code would be a - nightmare. But, it's unlikely to change and is small, - so duplicate it here. Note we don't need to catch all - of the errors, since they'll be caught when parsing the - expression. We just need to match the non-error - cases. Thus we can ignore \n in single-quoted strings, - for example. Or non-terminated strings. */ - if (ch == quote_char) { - /* Does this match the string_type (single or triple - quoted)? */ - if (string_type == 3) { - if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) { - /* We're at the end of a triple quoted string. */ - *str += 2; - string_type = 0; - quote_char = 0; - continue; - } - } else { - /* We're at the end of a normal string. */ - quote_char = 0; - string_type = 0; - continue; - } - } - } else if (ch == '\'' || ch == '"') { - /* Is this a triple quoted string? */ - if (*str+2 < end && *(*str+1) == ch && *(*str+2) == ch) { - string_type = 3; - *str += 2; - } else { - /* Start of a normal string. */ - string_type = 1; - } - /* Start looking for the end of the string. */ - quote_char = ch; - } else if (ch == '[' || ch == '{' || ch == '(') { - if (nested_depth >= MAXLEVEL) { - ast_error(c, n, "f-string: too many nested parenthesis"); - goto error; - } - parenstack[nested_depth] = ch; - nested_depth++; - } else if (ch == '#') { - /* Error: can't include a comment character, inside parens - or not. */ - ast_error(c, n, "f-string expression part cannot include '#'"); - goto error; - } else if (nested_depth == 0 && - (ch == '!' || ch == ':' || ch == '}' || - ch == '=' || ch == '>' || ch == '<')) { - /* See if there's a next character. */ - if (*str+1 < end) { - char next = *(*str+1); - - /* For "!=". since '=' is not an allowed conversion character, - nothing is lost in this test. */ - if ((ch == '!' && next == '=') || /* != */ - (ch == '=' && next == '=') || /* == */ - (ch == '<' && next == '=') || /* <= */ - (ch == '>' && next == '=') /* >= */ - ) { - *str += 1; - continue; - } - /* Don't get out of the loop for these, if they're single - chars (not part of 2-char tokens). If by themselves, they - don't end an expression (unlike say '!'). */ - if (ch == '>' || ch == '<') { - continue; - } - } - - /* Normal way out of this loop. 
*/ - break; - } else if (ch == ']' || ch == '}' || ch == ')') { - if (!nested_depth) { - ast_error(c, n, "f-string: unmatched '%c'", ch); - goto error; - } - nested_depth--; - int opening = parenstack[nested_depth]; - if (!((opening == '(' && ch == ')') || - (opening == '[' && ch == ']') || - (opening == '{' && ch == '}'))) - { - ast_error(c, n, - "f-string: closing parenthesis '%c' " - "does not match opening parenthesis '%c'", - ch, opening); - goto error; - } - } else { - /* Just consume this char and loop around. */ - } - } - expr_end = *str; - /* If we leave this loop in a string or with mismatched parens, we - don't care. We'll get a syntax error when compiling the - expression. But, we can produce a better error message, so - let's just do that.*/ - if (quote_char) { - ast_error(c, n, "f-string: unterminated string"); - goto error; - } - if (nested_depth) { - int opening = parenstack[nested_depth - 1]; - ast_error(c, n, "f-string: unmatched '%c'", opening); - goto error; - } - - if (*str >= end) - goto unexpected_end_of_string; - - /* Compile the expression as soon as possible, so we show errors - related to the expression before errors related to the - conversion or format_spec. */ - simple_expression = fstring_compile_expr(expr_start, expr_end, c, n); - if (!simple_expression) - goto error; - - /* Check for =, which puts the text value of the expression in - expr_text. */ - if (**str == '=') { - if (c->c_feature_version < 8) { - ast_error(c, n, - "f-string: self documenting expressions are " - "only supported in Python 3.8 and greater"); - goto error; - } - *str += 1; - - /* Skip over ASCII whitespace. No need to test for end of string - here, since we know there's at least a trailing quote somewhere - ahead. */ - while (Py_ISSPACE(**str)) { - *str += 1; - } - - /* Set *expr_text to the text of the expression. */ - *expr_text = PyUnicode_FromStringAndSize(expr_start, *str-expr_start); - if (!*expr_text) { - goto error; - } - } - - /* Check for a conversion char, if present. */ - if (**str == '!') { - *str += 1; - if (*str >= end) - goto unexpected_end_of_string; - - conversion = **str; - *str += 1; - - /* Validate the conversion. */ - if (!(conversion == 's' || conversion == 'r' || conversion == 'a')) { - ast_error(c, n, - "f-string: invalid conversion character: " - "expected 's', 'r', or 'a'"); - goto error; - } - - } - - /* Check for the format spec, if present. */ - if (*str >= end) - goto unexpected_end_of_string; - if (**str == ':') { - *str += 1; - if (*str >= end) - goto unexpected_end_of_string; - - /* Parse the format spec. */ - format_spec = fstring_parse(str, end, raw, recurse_lvl+1, c, n); - if (!format_spec) - goto error; - } - - if (*str >= end || **str != '}') - goto unexpected_end_of_string; - - /* We're at a right brace. Consume it. */ - assert(*str < end); - assert(**str == '}'); - *str += 1; - - /* If we're in = mode (detected by non-NULL expr_text), and have no format - spec and no explicit conversion, set the conversion to 'r'. */ - if (*expr_text && format_spec == NULL && conversion == -1) { - conversion = 'r'; - } - - /* And now create the FormattedValue node that represents this - entire expression with the conversion and format spec. */ - *expression = FormattedValue(simple_expression, conversion, - format_spec, LINENO(n), - n->n_col_offset, n->n_end_lineno, - n->n_end_col_offset, c->c_arena); - if (!*expression) - goto error; - - return 0; - -unexpected_end_of_string: - ast_error(c, n, "f-string: expecting '}'"); - /* Falls through to error. 
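
The '=' (debug text), conversion-character, and format_spec handling above corresponds to f-string behaviour like the following sketch (value and width are placeholder names):

    value = 42
    width = 5
    print(f"{value=}")            # value=42    ('=' keeps the expression text; !r is the default conversion)
    print(f"{value=:>{width}}")   # value=   42 (an explicit format spec suppresses the default !r)
    print(f"{value!s:>{width}}")  #    42       (conversion must be one of !s, !r, !a)
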
*/ - -error: - Py_XDECREF(*expr_text); - return -1; - -} - -/* Return -1 on error. - - Return 0 if we have a literal (possible zero length) and an - expression (zero length if at the end of the string. - - Return 1 if we have a literal, but no expression, and we want the - caller to call us again. This is used to deal with doubled - braces. - - When called multiple times on the string 'a{{b{0}c', this function - will return: - - 1. the literal 'a{' with no expression, and a return value - of 1. Despite the fact that there's no expression, the return - value of 1 means we're not finished yet. - - 2. the literal 'b' and the expression '0', with a return value of - 0. The fact that there's an expression means we're not finished. - - 3. literal 'c' with no expression and a return value of 0. The - combination of the return value of 0 with no expression means - we're finished. -*/ -static int -fstring_find_literal_and_expr(const char **str, const char *end, int raw, - int recurse_lvl, PyObject **literal, - PyObject **expr_text, expr_ty *expression, - struct compiling *c, const node *n) -{ - int result; - - assert(*literal == NULL && *expression == NULL); - - /* Get any literal string. */ - result = fstring_find_literal(str, end, raw, literal, recurse_lvl, c, n); - if (result < 0) - goto error; - - assert(result == 0 || result == 1); - - if (result == 1) - /* We have a literal, but don't look at the expression. */ - return 1; - - if (*str >= end || **str == '}') - /* We're at the end of the string or the end of a nested - f-string: no expression. The top-level error case where we - expect to be at the end of the string but we're at a '}' is - handled later. */ - return 0; - - /* We must now be the start of an expression, on a '{'. */ - assert(**str == '{'); - - if (fstring_find_expr(str, end, raw, recurse_lvl, expr_text, - expression, c, n) < 0) - goto error; - - return 0; - -error: - Py_CLEAR(*literal); - return -1; -} - -#define EXPRLIST_N_CACHED 64 - -typedef struct { - /* Incrementally build an array of expr_ty, so be used in an - asdl_seq. Cache some small but reasonably sized number of - expr_ty's, and then after that start dynamically allocating, - doubling the number allocated each time. Note that the f-string - f'{0}a{1}' contains 3 expr_ty's: 2 FormattedValue's, and one - Constant for the literal 'a'. So you add expr_ty's about twice as - fast as you add expressions in an f-string. */ - - Py_ssize_t allocated; /* Number we've allocated. */ - Py_ssize_t size; /* Number we've used. */ - expr_ty *p; /* Pointer to the memory we're actually - using. Will point to 'data' until we - start dynamically allocating. */ - expr_ty data[EXPRLIST_N_CACHED]; -} ExprList; - -#ifdef NDEBUG -#define ExprList_check_invariants(l) -#else -static void -ExprList_check_invariants(ExprList *l) -{ - /* Check our invariants. Make sure this object is "live", and - hasn't been deallocated. */ - assert(l->size >= 0); - assert(l->p != NULL); - if (l->size <= EXPRLIST_N_CACHED) - assert(l->data == l->p); -} -#endif - -static void -ExprList_Init(ExprList *l) -{ - l->allocated = EXPRLIST_N_CACHED; - l->size = 0; - - /* Until we start allocating dynamically, p points to data. */ - l->p = l->data; - - ExprList_check_invariants(l); -} - -static int -ExprList_Append(ExprList *l, expr_ty exp) -{ - ExprList_check_invariants(l); - if (l->size >= l->allocated) { - /* We need to alloc (or realloc) the memory. */ - Py_ssize_t new_size = l->allocated * 2; - - /* See if we've ever allocated anything dynamically. 
*/ - if (l->p == l->data) { - Py_ssize_t i; - /* We're still using the cached data. Switch to - alloc-ing. */ - l->p = PyMem_RawMalloc(sizeof(expr_ty) * new_size); - if (!l->p) - return -1; - /* Copy the cached data into the new buffer. */ - for (i = 0; i < l->size; i++) - l->p[i] = l->data[i]; - } else { - /* Just realloc. */ - expr_ty *tmp = PyMem_RawRealloc(l->p, sizeof(expr_ty) * new_size); - if (!tmp) { - PyMem_RawFree(l->p); - l->p = NULL; - return -1; - } - l->p = tmp; - } - - l->allocated = new_size; - assert(l->allocated == 2 * l->size); - } - - l->p[l->size++] = exp; - - ExprList_check_invariants(l); - return 0; -} - -static void -ExprList_Dealloc(ExprList *l) -{ - ExprList_check_invariants(l); - - /* If there's been an error, or we've never dynamically allocated, - do nothing. */ - if (!l->p || l->p == l->data) { - /* Do nothing. */ - } else { - /* We have dynamically allocated. Free the memory. */ - PyMem_RawFree(l->p); - } - l->p = NULL; - l->size = -1; -} - -static asdl_seq * -ExprList_Finish(ExprList *l, PyArena *arena) -{ - asdl_seq *seq; - - ExprList_check_invariants(l); - - /* Allocate the asdl_seq and copy the expressions in to it. */ - seq = _Py_asdl_seq_new(l->size, arena); - if (seq) { - Py_ssize_t i; - for (i = 0; i < l->size; i++) - asdl_seq_SET(seq, i, l->p[i]); - } - ExprList_Dealloc(l); - return seq; -} - -/* The FstringParser is designed to add a mix of strings and - f-strings, and concat them together as needed. Ultimately, it - generates an expr_ty. */ -typedef struct { - PyObject *last_str; - ExprList expr_list; - int fmode; -} FstringParser; - -#ifdef NDEBUG -#define FstringParser_check_invariants(state) -#else -static void -FstringParser_check_invariants(FstringParser *state) -{ - if (state->last_str) - assert(PyUnicode_CheckExact(state->last_str)); - ExprList_check_invariants(&state->expr_list); -} -#endif - -static void -FstringParser_Init(FstringParser *state) -{ - state->last_str = NULL; - state->fmode = 0; - ExprList_Init(&state->expr_list); - FstringParser_check_invariants(state); -} - -static void -FstringParser_Dealloc(FstringParser *state) -{ - FstringParser_check_invariants(state); - - Py_XDECREF(state->last_str); - ExprList_Dealloc(&state->expr_list); -} - -/* Constants for the following */ -static PyObject *u_kind; - -/* Compute 'kind' field for string Constant (either 'u' or None) */ -static PyObject * -make_kind(struct compiling *c, const node *n) -{ - char *s = NULL; - PyObject *kind = NULL; - - /* Find the first string literal, if any */ - while (TYPE(n) != STRING) { - if (NCH(n) == 0) - return NULL; - n = CHILD(n, 0); - } - REQ(n, STRING); - - /* If it starts with 'u', return a PyUnicode "u" string */ - s = STR(n); - if (s && *s == 'u') { - if (!u_kind) { - u_kind = PyUnicode_InternFromString("u"); - if (!u_kind) - return NULL; - } - kind = u_kind; - if (PyArena_AddPyObject(c->c_arena, kind) < 0) { - return NULL; - } - Py_INCREF(kind); - } - return kind; -} - -/* Make a Constant node, but decref the PyUnicode object being added. 
*/ -static expr_ty -make_str_node_and_del(PyObject **str, struct compiling *c, const node* n) -{ - PyObject *s = *str; - PyObject *kind = NULL; - *str = NULL; - assert(PyUnicode_CheckExact(s)); - if (PyArena_AddPyObject(c->c_arena, s) < 0) { - Py_DECREF(s); - return NULL; - } - kind = make_kind(c, n); - if (kind == NULL && PyErr_Occurred()) - return NULL; - return Constant(s, kind, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); -} - -/* Add a non-f-string (that is, a regular literal string). str is - decref'd. */ -static int -FstringParser_ConcatAndDel(FstringParser *state, PyObject *str) -{ - FstringParser_check_invariants(state); - - assert(PyUnicode_CheckExact(str)); - - if (PyUnicode_GET_LENGTH(str) == 0) { - Py_DECREF(str); - return 0; - } - - if (!state->last_str) { - /* We didn't have a string before, so just remember this one. */ - state->last_str = str; - } else { - /* Concatenate this with the previous string. */ - PyUnicode_AppendAndDel(&state->last_str, str); - if (!state->last_str) - return -1; - } - FstringParser_check_invariants(state); - return 0; -} - -/* Parse an f-string. The f-string is in *str to end, with no - 'f' or quotes. */ -static int -FstringParser_ConcatFstring(FstringParser *state, const char **str, - const char *end, int raw, int recurse_lvl, - struct compiling *c, const node *n) -{ - FstringParser_check_invariants(state); - state->fmode = 1; - - /* Parse the f-string. */ - while (1) { - PyObject *literal = NULL; - PyObject *expr_text = NULL; - expr_ty expression = NULL; - - /* If there's a zero length literal in front of the - expression, literal will be NULL. If we're at the end of - the f-string, expression will be NULL (unless result == 1, - see below). */ - int result = fstring_find_literal_and_expr(str, end, raw, recurse_lvl, - &literal, &expr_text, - &expression, c, n); - if (result < 0) - return -1; - - /* Add the literal, if any. */ - if (literal && FstringParser_ConcatAndDel(state, literal) < 0) { - Py_XDECREF(expr_text); - return -1; - } - /* Add the expr_text, if any. */ - if (expr_text && FstringParser_ConcatAndDel(state, expr_text) < 0) { - return -1; - } - - /* We've dealt with the literal and expr_text, their ownership has - been transferred to the state object. Don't look at them again. */ - - /* See if we should just loop around to get the next literal - and expression, while ignoring the expression this - time. This is used for un-doubling braces, as an - optimization. */ - if (result == 1) - continue; - - if (!expression) - /* We're done with this f-string. */ - break; - - /* We know we have an expression. Convert any existing string - to a Constant node. */ - if (!state->last_str) { - /* Do nothing. No previous literal. */ - } else { - /* Convert the existing last_str literal to a Constant node. */ - expr_ty str = make_str_node_and_del(&state->last_str, c, n); - if (!str || ExprList_Append(&state->expr_list, str) < 0) - return -1; - } - - if (ExprList_Append(&state->expr_list, expression) < 0) - return -1; - } - - /* If recurse_lvl is zero, then we must be at the end of the - string. Otherwise, we must be at a right brace. */ - - if (recurse_lvl == 0 && *str < end-1) { - ast_error(c, n, "f-string: unexpected end of string"); - return -1; - } - if (recurse_lvl != 0 && **str != '}') { - ast_error(c, n, "f-string: expecting '}'"); - return -1; - } - - FstringParser_check_invariants(state); - return 0; -} - -/* Convert the partial state reflected in last_str and expr_list to an - expr_ty. 
The expr_ty can be a Constant, or a JoinedStr. */ -static expr_ty -FstringParser_Finish(FstringParser *state, struct compiling *c, - const node *n) -{ - asdl_seq *seq; - - FstringParser_check_invariants(state); - - /* If we're just a constant string with no expressions, return - that. */ - if (!state->fmode) { - assert(!state->expr_list.size); - if (!state->last_str) { - /* Create a zero length string. */ - state->last_str = PyUnicode_FromStringAndSize(NULL, 0); - if (!state->last_str) - goto error; - } - return make_str_node_and_del(&state->last_str, c, n); - } - - /* Create a Constant node out of last_str, if needed. It will be the - last node in our expression list. */ - if (state->last_str) { - expr_ty str = make_str_node_and_del(&state->last_str, c, n); - if (!str || ExprList_Append(&state->expr_list, str) < 0) - goto error; - } - /* This has already been freed. */ - assert(state->last_str == NULL); - - seq = ExprList_Finish(&state->expr_list, c->c_arena); - if (!seq) - goto error; - - return JoinedStr(seq, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - -error: - FstringParser_Dealloc(state); - return NULL; -} - -/* Given an f-string (with no 'f' or quotes) that's in *str and ends - at end, parse it into an expr_ty. Return NULL on error. Adjust - str to point past the parsed portion. */ -static expr_ty -fstring_parse(const char **str, const char *end, int raw, int recurse_lvl, - struct compiling *c, const node *n) -{ - FstringParser state; - - FstringParser_Init(&state); - if (FstringParser_ConcatFstring(&state, str, end, raw, recurse_lvl, - c, n) < 0) { - FstringParser_Dealloc(&state); - return NULL; - } - - return FstringParser_Finish(&state, c, n); -} - -/* n is a Python string literal, including the bracketing quote - characters, and r, b, u, &/or f prefixes (if any), and embedded - escape sequences (if any). parsestr parses it, and sets *result to - decoded Python string object. If the string is an f-string, set - *fstr and *fstrlen to the unparsed string object. Return 0 if no - errors occurred. -*/ -static int -parsestr(struct compiling *c, const node *n, int *bytesmode, int *rawmode, - PyObject **result, const char **fstr, Py_ssize_t *fstrlen) -{ - size_t len; - const char *s = STR(n); - int quote = Py_CHARMASK(*s); - int fmode = 0; - *bytesmode = 0; - *rawmode = 0; - *result = NULL; - *fstr = NULL; - if (Py_ISALPHA(quote)) { - while (!*bytesmode || !*rawmode) { - if (quote == 'b' || quote == 'B') { - quote = *++s; - *bytesmode = 1; - } - else if (quote == 'u' || quote == 'U') { - quote = *++s; - } - else if (quote == 'r' || quote == 'R') { - quote = *++s; - *rawmode = 1; - } - else if (quote == 'f' || quote == 'F') { - quote = *++s; - fmode = 1; - } - else { - break; - } - } - } - - /* fstrings are only allowed in Python 3.6 and greater */ - if (fmode && c->c_feature_version < 6) { - ast_error(c, n, "Format strings are only supported in Python 3.6 and greater"); - return -1; - } - - if (fmode && *bytesmode) { - PyErr_BadInternalCall(); - return -1; - } - if (quote != '\'' && quote != '\"') { - PyErr_BadInternalCall(); - return -1; - } - /* Skip the leading quote char. */ - s++; - len = strlen(s); - if (len > INT_MAX) { - PyErr_SetString(PyExc_OverflowError, - "string to parse is too long"); - return -1; - } - if (s[--len] != quote) { - /* Last quote char must match the first. */ - PyErr_BadInternalCall(); - return -1; - } - if (len >= 4 && s[0] == quote && s[1] == quote) { - /* A triple quoted string. 
We've already skipped one quote at - the start and one at the end of the string. Now skip the - two at the start. */ - s += 2; - len -= 2; - /* And check that the last two match. */ - if (s[--len] != quote || s[--len] != quote) { - PyErr_BadInternalCall(); - return -1; - } - } - - if (fmode) { - /* Just return the bytes. The caller will parse the resulting - string. */ - *fstr = s; - *fstrlen = len; - return 0; - } - - /* Not an f-string. */ - /* Avoid invoking escape decoding routines if possible. */ - *rawmode = *rawmode || strchr(s, '\\') == NULL; - if (*bytesmode) { - /* Disallow non-ASCII characters. */ - const char *ch; - for (ch = s; *ch; ch++) { - if (Py_CHARMASK(*ch) >= 0x80) { - ast_error(c, n, - "bytes can only contain ASCII " - "literal characters."); - return -1; - } - } - if (*rawmode) - *result = PyBytes_FromStringAndSize(s, len); - else - *result = decode_bytes_with_escapes(c, n, s, len); - } else { - if (*rawmode) - *result = PyUnicode_DecodeUTF8Stateful(s, len, NULL, NULL); - else - *result = decode_unicode_with_escapes(c, n, s, len); - } - return *result == NULL ? -1 : 0; -} - -/* Accepts a STRING+ atom, and produces an expr_ty node. Run through - each STRING atom, and process it as needed. For bytes, just - concatenate them together, and the result will be a Constant node. For - normal strings and f-strings, concatenate them together. The result - will be a Constant node if there were no f-strings; a FormattedValue - node if there's just an f-string (with no leading or trailing - literals), or a JoinedStr node if there are multiple f-strings or - any literals involved. */ -static expr_ty -parsestrplus(struct compiling *c, const node *n) -{ - int bytesmode = 0; - PyObject *bytes_str = NULL; - int i; - - FstringParser state; - FstringParser_Init(&state); - - for (i = 0; i < NCH(n); i++) { - int this_bytesmode; - int this_rawmode; - PyObject *s; - const char *fstr; - Py_ssize_t fstrlen = -1; /* Silence a compiler warning. */ - - REQ(CHILD(n, i), STRING); - if (parsestr(c, CHILD(n, i), &this_bytesmode, &this_rawmode, &s, - &fstr, &fstrlen) != 0) - goto error; - - /* Check that we're not mixing bytes with unicode. */ - if (i != 0 && bytesmode != this_bytesmode) { - ast_error(c, n, "cannot mix bytes and nonbytes literals"); - /* s is NULL if the current string part is an f-string. */ - Py_XDECREF(s); - goto error; - } - bytesmode = this_bytesmode; - - if (fstr != NULL) { - int result; - assert(s == NULL && !bytesmode); - /* This is an f-string. Parse and concatenate it. */ - result = FstringParser_ConcatFstring(&state, &fstr, fstr+fstrlen, - this_rawmode, 0, c, n); - if (result < 0) - goto error; - } else { - /* A string or byte string. */ - assert(s != NULL && fstr == NULL); - - assert(bytesmode ? PyBytes_CheckExact(s) : - PyUnicode_CheckExact(s)); - - if (bytesmode) { - /* For bytes, concat as we go. */ - if (i == 0) { - /* First time, just remember this value. */ - bytes_str = s; - } else { - PyBytes_ConcatAndDel(&bytes_str, s); - if (!bytes_str) - goto error; - } - } else { - /* This is a regular string. Concatenate it. */ - if (FstringParser_ConcatAndDel(&state, s) < 0) - goto error; - } - } - } - if (bytesmode) { - /* Just return the bytes object and we're done. */ - if (PyArena_AddPyObject(c->c_arena, bytes_str) < 0) - goto error; - return Constant(bytes_str, NULL, LINENO(n), n->n_col_offset, - n->n_end_lineno, n->n_end_col_offset, c->c_arena); - } - - /* We're not a bytes string, bytes_str should never have been set. 
*/ - assert(bytes_str == NULL); - - return FstringParser_Finish(&state, c, n); - -error: - Py_XDECREF(bytes_str); - FstringParser_Dealloc(&state); - return NULL; -} - PyObject * _PyAST_GetDocString(asdl_seq *body) { diff --git a/Python/compile.c b/Python/compile.c index fccc688affca6..8fe82f91559e0 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -390,21 +390,6 @@ PyAST_CompileEx(mod_ty mod, const char *filename_str, PyCompilerFlags *flags, } -PyCodeObject * -PyNode_Compile(struct _node *n, const char *filename) -{ - PyCodeObject *co = NULL; - mod_ty mod; - PyArena *arena = PyArena_New(); - if (!arena) - return NULL; - mod = PyAST_FromNode(n, NULL, filename, arena); - if (mod) - co = PyAST_Compile(mod, filename, NULL, arena); - PyArena_Free(arena); - return co; -} - static void compiler_free(struct compiler *c) { diff --git a/Python/initconfig.c b/Python/initconfig.c index 998ceb7bbfa51..d8b3df885722f 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -72,7 +72,6 @@ static const char usage_3[] = "\ -X opt : set implementation-specific option. The following options are available:\n\ \n\ -X faulthandler: enable faulthandler\n\ - -X oldparser: enable the traditional LL(1) parser; also PYTHONOLDPARSER\n\ -X showrefcount: output the total reference count and number of used\n\ memory blocks when the program finishes or after each statement in the\n\ interactive interpreter. This only works on debug builds\n\ @@ -640,7 +639,6 @@ _PyConfig_InitCompatConfig(PyConfig *config) #ifdef MS_WINDOWS config->legacy_windows_stdio = -1; #endif - config->_use_peg_parser = 1; } @@ -798,7 +796,6 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_ATTR(isolated); COPY_ATTR(use_environment); COPY_ATTR(dev_mode); - COPY_ATTR(_use_peg_parser); COPY_ATTR(install_signal_handlers); COPY_ATTR(use_hash_seed); COPY_ATTR(hash_seed); @@ -905,7 +902,6 @@ config_as_dict(const PyConfig *config) SET_ITEM_INT(isolated); SET_ITEM_INT(use_environment); SET_ITEM_INT(dev_mode); - SET_ITEM_INT(_use_peg_parser); SET_ITEM_INT(install_signal_handlers); SET_ITEM_INT(use_hash_seed); SET_ITEM_UINT(hash_seed); @@ -1451,11 +1447,6 @@ config_read_complex_options(PyConfig *config) config->import_time = 1; } - if (config_get_env(config, "PYTHONOLDPARSER") - || config_get_xoption(config, L"oldparser")) { - config->_use_peg_parser = 0; - } - PyStatus status; if (config->tracemalloc < 0) { status = config_init_tracemalloc(config); @@ -2549,7 +2540,6 @@ PyConfig_Read(PyConfig *config) assert(config->isolated >= 0); assert(config->use_environment >= 0); assert(config->dev_mode >= 0); - assert(config->_use_peg_parser >= 0); assert(config->install_signal_handlers >= 0); assert(config->use_hash_seed >= 0); assert(config->faulthandler >= 0); diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index f2f7d585c8000..c754f2169dece 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -18,7 +18,6 @@ #include "pycore_sysmodule.h" // _PySys_ClearAuditHooks() #include "pycore_traceback.h" // _Py_DumpTracebackThreads() -#include "grammar.h" // PyGrammar_RemoveAccelerators() #include // setlocale() #ifdef HAVE_SIGNAL_H @@ -50,7 +49,6 @@ _Py_IDENTIFIER(threading); extern "C" { #endif -extern grammar _PyParser_Grammar; /* From graminit.c */ /* Forward declarations */ static PyStatus add_main_module(PyInterpreterState *interp); @@ -1301,7 +1299,6 @@ finalize_interp_clear(PyThreadState *tstate) _PyWarnings_Fini(tstate->interp); if (is_main_interp) { - PyGrammar_RemoveAccelerators(&_PyParser_Grammar); _PyExc_Fini(); } 
diff --git a/Python/pythonrun.c b/Python/pythonrun.c index cb0e3b02e163a..7a3b5b52ac417 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -58,16 +58,12 @@ _Py_static_string(PyId_string, ""); extern "C" { #endif -extern Py_EXPORTED_SYMBOL grammar _PyParser_Grammar; /* From graminit.c */ - /* Forward */ static void flush_io(void); static PyObject *run_mod(mod_ty, PyObject *, PyObject *, PyObject *, PyCompilerFlags *, PyArena *); static PyObject *run_pyc_file(FILE *, const char *, PyObject *, PyObject *, PyCompilerFlags *); -static void err_input(perrdetail *); -static void err_free(perrdetail *); static int PyRun_InteractiveOneObjectEx(FILE *, PyObject *, PyCompilerFlags *); /* Parse input from a file and execute it */ @@ -148,32 +144,6 @@ PyRun_InteractiveLoopFlags(FILE *fp, const char *filename_str, PyCompilerFlags * return err; } -/* compute parser flags based on compiler flags */ -static int PARSER_FLAGS(PyCompilerFlags *flags) -{ - int parser_flags = 0; - if (!flags) - return 0; - if (flags->cf_flags & PyCF_DONT_IMPLY_DEDENT) - parser_flags |= PyPARSE_DONT_IMPLY_DEDENT; - if (flags->cf_flags & PyCF_IGNORE_COOKIE) - parser_flags |= PyPARSE_IGNORE_COOKIE; - if (flags->cf_flags & CO_FUTURE_BARRY_AS_BDFL) - parser_flags |= PyPARSE_BARRY_AS_BDFL; - if (flags->cf_flags & PyCF_TYPE_COMMENTS) - parser_flags |= PyPARSE_TYPE_COMMENTS; - return parser_flags; -} - -#if 0 -/* Keep an example of flags with future keyword support. */ -#define PARSER_FLAGS(flags) \ - ((flags) ? ((((flags)->cf_flags & PyCF_DONT_IMPLY_DEDENT) ? \ - PyPARSE_DONT_IMPLY_DEDENT : 0) \ - | ((flags)->cf_flags & CO_FUTURE_WITH_STATEMENT ? \ - PyPARSE_WITH_IS_KEYWORD : 0)) : 0) -#endif - /* A PyRun_InteractiveOneObject() auxiliary function that does not print the * error on failure. 
*/ static int @@ -185,7 +155,6 @@ PyRun_InteractiveOneObjectEx(FILE *fp, PyObject *filename, PyArena *arena; const char *ps1 = "", *ps2 = "", *enc = NULL; int errcode = 0; - int use_peg = _PyInterpreterState_GET()->config._use_peg_parser; _Py_IDENTIFIER(encoding); _Py_IDENTIFIER(__main__); @@ -239,15 +208,8 @@ PyRun_InteractiveOneObjectEx(FILE *fp, PyObject *filename, return -1; } - if (use_peg) { - mod = PyPegen_ASTFromFileObject(fp, filename, Py_single_input, - enc, ps1, ps2, flags, &errcode, arena); - } - else { - mod = PyParser_ASTFromFileObject(fp, filename, enc, - Py_single_input, ps1, ps2, - flags, &errcode, arena); - } + mod = PyPegen_ASTFromFileObject(fp, filename, Py_single_input, + enc, ps1, ps2, flags, &errcode, arena); Py_XDECREF(v); Py_XDECREF(w); @@ -1058,7 +1020,6 @@ PyRun_StringFlags(const char *str, int start, PyObject *globals, mod_ty mod; PyArena *arena; PyObject *filename; - int use_peg = _PyInterpreterState_GET()->config._use_peg_parser; filename = _PyUnicode_FromId(&PyId_string); /* borrowed */ if (filename == NULL) @@ -1068,12 +1029,7 @@ PyRun_StringFlags(const char *str, int start, PyObject *globals, if (arena == NULL) return NULL; - if (use_peg) { - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); - } - else { - mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); - } + mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); if (mod != NULL) ret = run_mod(mod, filename, globals, locals, flags, arena); @@ -1089,7 +1045,6 @@ PyRun_FileExFlags(FILE *fp, const char *filename_str, int start, PyObject *globa mod_ty mod; PyArena *arena = NULL; PyObject *filename; - int use_peg = _PyInterpreterState_GET()->config._use_peg_parser; filename = PyUnicode_DecodeFSDefault(filename_str); if (filename == NULL) @@ -1099,14 +1054,8 @@ PyRun_FileExFlags(FILE *fp, const char *filename_str, int start, PyObject *globa if (arena == NULL) goto exit; - if (use_peg) { - mod = PyPegen_ASTFromFileObject(fp, filename, start, NULL, NULL, NULL, + mod = PyPegen_ASTFromFileObject(fp, filename, start, NULL, NULL, NULL, flags, NULL, arena); - } - else { - mod = PyParser_ASTFromFileObject(fp, filename, NULL, start, 0, 0, - flags, NULL, arena); - } if (closeit) fclose(fp); @@ -1250,17 +1199,11 @@ Py_CompileStringObject(const char *str, PyObject *filename, int start, { PyCodeObject *co; mod_ty mod; - int use_peg = _PyInterpreterState_GET()->config._use_peg_parser; PyArena *arena = PyArena_New(); if (arena == NULL) return NULL; - if (use_peg) { - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); - } - else { - mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); - } + mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); if (mod == NULL) { PyArena_Free(arena); return NULL; @@ -1357,19 +1300,13 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, int start, Py { struct symtable *st; mod_ty mod; - int use_peg = _PyInterpreterState_GET()->config._use_peg_parser; PyArena *arena; arena = PyArena_New(); if (arena == NULL) return NULL; - if (use_peg) { - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); - } - else { - mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); - } + mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); if (mod == NULL) { PyArena_Free(arena); return NULL; @@ -1393,291 +1330,6 @@ Py_SymtableString(const char *str, const char *filename_str, int start) return st; } -/* Preferred access to parser is 
through AST. */ -mod_ty -PyParser_ASTFromStringObject(const char *s, PyObject *filename, int start, - PyCompilerFlags *flags, PyArena *arena) -{ - mod_ty mod; - PyCompilerFlags localflags = _PyCompilerFlags_INIT; - perrdetail err; - int iflags = PARSER_FLAGS(flags); - if (flags && flags->cf_feature_version < 7) - iflags |= PyPARSE_ASYNC_HACKS; - - node *n = PyParser_ParseStringObject(s, filename, - &_PyParser_Grammar, start, &err, - &iflags); - if (flags == NULL) { - flags = &localflags; - } - if (n) { - flags->cf_flags |= iflags & PyCF_MASK; - mod = PyAST_FromNodeObject(n, flags, filename, arena); - PyNode_Free(n); - } - else { - err_input(&err); - mod = NULL; - } - err_free(&err); - return mod; -} - -mod_ty -PyParser_ASTFromString(const char *s, const char *filename_str, int start, - PyCompilerFlags *flags, PyArena *arena) -{ - PyObject *filename; - mod_ty mod; - filename = PyUnicode_DecodeFSDefault(filename_str); - if (filename == NULL) - return NULL; - mod = PyParser_ASTFromStringObject(s, filename, start, flags, arena); - Py_DECREF(filename); - return mod; -} - -mod_ty -PyParser_ASTFromFileObject(FILE *fp, PyObject *filename, const char* enc, - int start, const char *ps1, - const char *ps2, PyCompilerFlags *flags, int *errcode, - PyArena *arena) -{ - mod_ty mod; - PyCompilerFlags localflags = _PyCompilerFlags_INIT; - perrdetail err; - int iflags = PARSER_FLAGS(flags); - - node *n = PyParser_ParseFileObject(fp, filename, enc, - &_PyParser_Grammar, - start, ps1, ps2, &err, &iflags); - if (flags == NULL) { - flags = &localflags; - } - if (n) { - flags->cf_flags |= iflags & PyCF_MASK; - mod = PyAST_FromNodeObject(n, flags, filename, arena); - PyNode_Free(n); - } - else { - err_input(&err); - if (errcode) - *errcode = err.error; - mod = NULL; - } - err_free(&err); - return mod; -} - -mod_ty -PyParser_ASTFromFile(FILE *fp, const char *filename_str, const char* enc, - int start, const char *ps1, - const char *ps2, PyCompilerFlags *flags, int *errcode, - PyArena *arena) -{ - mod_ty mod; - PyObject *filename; - filename = PyUnicode_DecodeFSDefault(filename_str); - if (filename == NULL) - return NULL; - mod = PyParser_ASTFromFileObject(fp, filename, enc, start, ps1, ps2, - flags, errcode, arena); - Py_DECREF(filename); - return mod; -} - -/* Simplified interface to parsefile -- return node or set exception */ - -node * -PyParser_SimpleParseFileFlags(FILE *fp, const char *filename, int start, int flags) -{ - perrdetail err; - node *n = PyParser_ParseFileFlags(fp, filename, NULL, - &_PyParser_Grammar, - start, NULL, NULL, &err, flags); - if (n == NULL) - err_input(&err); - err_free(&err); - - return n; -} - -/* Simplified interface to parsestring -- return node or set exception */ - -node * -PyParser_SimpleParseStringFlags(const char *str, int start, int flags) -{ - perrdetail err; - node *n = PyParser_ParseStringFlags(str, &_PyParser_Grammar, - start, &err, flags); - if (n == NULL) - err_input(&err); - err_free(&err); - return n; -} - -node * -PyParser_SimpleParseStringFlagsFilename(const char *str, const char *filename, - int start, int flags) -{ - perrdetail err; - node *n = PyParser_ParseStringFlagsFilename(str, filename, - &_PyParser_Grammar, start, &err, flags); - if (n == NULL) - err_input(&err); - err_free(&err); - return n; -} - -/* May want to move a more generalized form of this to parsetok.c or - even parser modules. 
*/ - -void -PyParser_ClearError(perrdetail *err) -{ - err_free(err); -} - -void -PyParser_SetError(perrdetail *err) -{ - err_input(err); -} - -static void -err_free(perrdetail *err) -{ - Py_CLEAR(err->filename); -} - -/* Set the error appropriate to the given input error code (see errcode.h) */ - -static void -err_input(perrdetail *err) -{ - PyObject *v, *w, *errtype, *errtext; - PyObject *msg_obj = NULL; - const char *msg = NULL; - int offset = err->offset; - - errtype = PyExc_SyntaxError; - switch (err->error) { - case E_ERROR: - goto cleanup; - case E_SYNTAX: - errtype = PyExc_IndentationError; - if (err->expected == INDENT) - msg = "expected an indented block"; - else if (err->token == INDENT) - msg = "unexpected indent"; - else if (err->token == DEDENT) - msg = "unexpected unindent"; - else if (err->expected == NOTEQUAL) { - errtype = PyExc_SyntaxError; - msg = "with Barry as BDFL, use '<>' instead of '!='"; - } - else { - errtype = PyExc_SyntaxError; - msg = "invalid syntax"; - } - break; - case E_TOKEN: - msg = "invalid token"; - break; - case E_EOFS: - msg = "EOF while scanning triple-quoted string literal"; - break; - case E_EOLS: - msg = "EOL while scanning string literal"; - break; - case E_INTR: - if (!PyErr_Occurred()) - PyErr_SetNone(PyExc_KeyboardInterrupt); - goto cleanup; - case E_NOMEM: - PyErr_NoMemory(); - goto cleanup; - case E_EOF: - msg = "unexpected EOF while parsing"; - break; - case E_TABSPACE: - errtype = PyExc_TabError; - msg = "inconsistent use of tabs and spaces in indentation"; - break; - case E_OVERFLOW: - msg = "expression too long"; - break; - case E_DEDENT: - errtype = PyExc_IndentationError; - msg = "unindent does not match any outer indentation level"; - break; - case E_TOODEEP: - errtype = PyExc_IndentationError; - msg = "too many levels of indentation"; - break; - case E_DECODE: { - PyObject *type, *value, *tb; - PyErr_Fetch(&type, &value, &tb); - msg = "unknown decode error"; - if (value != NULL) - msg_obj = PyObject_Str(value); - Py_XDECREF(type); - Py_XDECREF(value); - Py_XDECREF(tb); - break; - } - case E_LINECONT: - msg = "unexpected character after line continuation character"; - break; - - case E_BADSINGLE: - msg = "multiple statements found while compiling a single statement"; - break; - default: - fprintf(stderr, "error=%d\n", err->error); - msg = "unknown parsing error"; - break; - } - /* err->text may not be UTF-8 in case of decoding errors. - Explicitly convert to an object. 
*/ - if (!err->text) { - errtext = Py_None; - Py_INCREF(Py_None); - } else { - errtext = PyUnicode_DecodeUTF8(err->text, err->offset, - "replace"); - if (errtext != NULL) { - Py_ssize_t len = strlen(err->text); - offset = (int)PyUnicode_GET_LENGTH(errtext); - if (len != err->offset) { - Py_DECREF(errtext); - errtext = PyUnicode_DecodeUTF8(err->text, len, - "replace"); - } - } - } - v = Py_BuildValue("(OiiN)", err->filename, - err->lineno, offset, errtext); - if (v != NULL) { - if (msg_obj) - w = Py_BuildValue("(OO)", msg_obj, v); - else - w = Py_BuildValue("(sO)", msg, v); - } else - w = NULL; - Py_XDECREF(v); - PyErr_SetObject(errtype, w); - Py_XDECREF(w); -cleanup: - Py_XDECREF(msg_obj); - if (err->text != NULL) { - PyObject_FREE(err->text); - err->text = NULL; - } -} - - #if defined(USE_STACKCHECK) #if defined(WIN32) && defined(_MSC_VER) @@ -1715,123 +1367,6 @@ PyOS_CheckStack(void) #endif /* USE_STACKCHECK */ -/* Deprecated C API functions still provided for binary compatibility */ - -#undef PyParser_SimpleParseFile -PyAPI_FUNC(node *) -PyParser_SimpleParseFile(FILE *fp, const char *filename, int start) -{ - return PyParser_SimpleParseFileFlags(fp, filename, start, 0); -} - -#undef PyParser_SimpleParseString -PyAPI_FUNC(node *) -PyParser_SimpleParseString(const char *str, int start) -{ - return PyParser_SimpleParseStringFlags(str, start, 0); -} - -#undef PyRun_AnyFile -PyAPI_FUNC(int) -PyRun_AnyFile(FILE *fp, const char *name) -{ - return PyRun_AnyFileExFlags(fp, name, 0, NULL); -} - -#undef PyRun_AnyFileEx -PyAPI_FUNC(int) -PyRun_AnyFileEx(FILE *fp, const char *name, int closeit) -{ - return PyRun_AnyFileExFlags(fp, name, closeit, NULL); -} - -#undef PyRun_AnyFileFlags -PyAPI_FUNC(int) -PyRun_AnyFileFlags(FILE *fp, const char *name, PyCompilerFlags *flags) -{ - return PyRun_AnyFileExFlags(fp, name, 0, flags); -} - -#undef PyRun_File -PyAPI_FUNC(PyObject *) -PyRun_File(FILE *fp, const char *p, int s, PyObject *g, PyObject *l) -{ - return PyRun_FileExFlags(fp, p, s, g, l, 0, NULL); -} - -#undef PyRun_FileEx -PyAPI_FUNC(PyObject *) -PyRun_FileEx(FILE *fp, const char *p, int s, PyObject *g, PyObject *l, int c) -{ - return PyRun_FileExFlags(fp, p, s, g, l, c, NULL); -} - -#undef PyRun_FileFlags -PyAPI_FUNC(PyObject *) -PyRun_FileFlags(FILE *fp, const char *p, int s, PyObject *g, PyObject *l, - PyCompilerFlags *flags) -{ - return PyRun_FileExFlags(fp, p, s, g, l, 0, flags); -} - -#undef PyRun_SimpleFile -PyAPI_FUNC(int) -PyRun_SimpleFile(FILE *f, const char *p) -{ - return PyRun_SimpleFileExFlags(f, p, 0, NULL); -} - -#undef PyRun_SimpleFileEx -PyAPI_FUNC(int) -PyRun_SimpleFileEx(FILE *f, const char *p, int c) -{ - return PyRun_SimpleFileExFlags(f, p, c, NULL); -} - - -#undef PyRun_String -PyAPI_FUNC(PyObject *) -PyRun_String(const char *str, int s, PyObject *g, PyObject *l) -{ - return PyRun_StringFlags(str, s, g, l, NULL); -} - -#undef PyRun_SimpleString -PyAPI_FUNC(int) -PyRun_SimpleString(const char *s) -{ - return PyRun_SimpleStringFlags(s, NULL); -} - -#undef Py_CompileString -PyAPI_FUNC(PyObject *) -Py_CompileString(const char *str, const char *p, int s) -{ - return Py_CompileStringExFlags(str, p, s, NULL, -1); -} - -#undef Py_CompileStringFlags -PyAPI_FUNC(PyObject *) -Py_CompileStringFlags(const char *str, const char *p, int s, - PyCompilerFlags *flags) -{ - return Py_CompileStringExFlags(str, p, s, flags, -1); -} - -#undef PyRun_InteractiveOne -PyAPI_FUNC(int) -PyRun_InteractiveOne(FILE *f, const char *p) -{ - return PyRun_InteractiveOneFlags(f, p, NULL); -} - -#undef 
PyRun_InteractiveLoop -PyAPI_FUNC(int) -PyRun_InteractiveLoop(FILE *f, const char *p) -{ - return PyRun_InteractiveLoopFlags(f, p, NULL); -} - #ifdef __cplusplus } #endif diff --git a/Tools/peg_generator/pegen/build.py b/Tools/peg_generator/pegen/build.py index 931ffc787523b..9edde372e8d13 100644 --- a/Tools/peg_generator/pegen/build.py +++ b/Tools/peg_generator/pegen/build.py @@ -66,15 +66,14 @@ def compile_c_extension( str(MOD_DIR.parent.parent.parent / "Python" / "Python-ast.c"), str(MOD_DIR.parent.parent.parent / "Python" / "asdl.c"), str(MOD_DIR.parent.parent.parent / "Parser" / "tokenizer.c"), - str(MOD_DIR.parent.parent.parent / "Parser" / "pegen" / "pegen.c"), - str(MOD_DIR.parent.parent.parent / "Parser" / "pegen" / "parse_string.c"), + str(MOD_DIR.parent.parent.parent / "Parser" / "pegen.c"), + str(MOD_DIR.parent.parent.parent / "Parser" / "string_parser.c"), str(MOD_DIR.parent / "peg_extension" / "peg_extension.c"), generated_source_path, ], include_dirs=[ str(MOD_DIR.parent.parent.parent / "Include" / "internal"), str(MOD_DIR.parent.parent.parent / "Parser"), - str(MOD_DIR.parent.parent.parent / "Parser" / "pegen"), ], extra_compile_args=extra_compile_args, extra_link_args=extra_link_args, diff --git a/configure.ac b/configure.ac index 30856c8b6883d..3001cc87f951f 100644 --- a/configure.ac +++ b/configure.ac @@ -5438,7 +5438,7 @@ do done AC_SUBST(SRCDIRS) -SRCDIRS="Parser Parser/pegen Objects Python Modules Modules/_io Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" AC_MSG_CHECKING(for build directories) for dir in $SRCDIRS; do if test ! -d $dir; then diff --git a/setup.py b/setup.py index a220f366e2509..ef2faf613beec 100644 --- a/setup.py +++ b/setup.py @@ -917,9 +917,6 @@ def detect_simple_extensions(self): # select(2); not on ancient System V self.add(Extension('select', ['selectmodule.c'])) - # Fred Drake's interface to the Python parser - self.add(Extension('parser', ['parsermodule.c'])) - # Memory-mapped files (also works on Win32). 
self.add(Extension('mmap', ['mmapmodule.c'])) From webhook-mailer at python.org Thu Jun 11 13:08:14 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Thu, 11 Jun 2020 17:08:14 -0000 Subject: [Python-checkins] bpo-40939: Remove some extra references to PYTHONOLDPARSER (GH-20815) Message-ID: https://github.com/python/cpython/commit/436b648910c27baf8164a6d46d746d36d8a93478 commit: 436b648910c27baf8164a6d46d746d36d8a93478 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-11T10:08:05-07:00 summary: bpo-40939: Remove some extra references to PYTHONOLDPARSER (GH-20815) Automerge-Triggered-By: @pablogsal files: M Programs/_testembed.c M Tools/scripts/run_tests.py diff --git a/Programs/_testembed.c b/Programs/_testembed.c index 6f38b6247fb89..b60d70be5f71e 100644 --- a/Programs/_testembed.c +++ b/Programs/_testembed.c @@ -485,8 +485,6 @@ static int test_init_from_config(void) config.install_signal_handlers = 0; - putenv("PYTHONOLDPARSER=1"); - /* FIXME: test use_environment */ putenv("PYTHONHASHSEED=42"); @@ -673,7 +671,6 @@ static void set_most_env_vars(void) putenv("PYTHONNOUSERSITE=1"); putenv("PYTHONFAULTHANDLER=1"); putenv("PYTHONIOENCODING=iso8859-1:replace"); - putenv("PYTHONOLDPARSER=1"); putenv("PYTHONPLATLIBDIR=env_platlibdir"); } diff --git a/Tools/scripts/run_tests.py b/Tools/scripts/run_tests.py index bcfa5e943b347..48feb3f778ee8 100644 --- a/Tools/scripts/run_tests.py +++ b/Tools/scripts/run_tests.py @@ -25,9 +25,8 @@ def main(regrtest_args): '-u', # Unbuffered stdout and stderr '-W', 'default', # Warnings set to 'default' '-bb', # Warnings about bytes/bytearray + '-E', # Ignore environment variables ] - if 'PYTHONOLDPARSER' not in os.environ: - args.append('-E') # Ignore environment variables # Allow user-specified interpreter options to override our defaults. args.extend(test.support.args_from_interpreter_flags()) From webhook-mailer at python.org Thu Jun 11 14:03:37 2020 From: webhook-mailer at python.org (kernc) Date: Thu, 11 Jun 2020 18:03:37 -0000 Subject: [Python-checkins] bpo-29620: iterate over a copy of sys.modules (GH-4800) Message-ID: https://github.com/python/cpython/commit/46398fba4d66ad342cf2504ef947b5fb857423b2 commit: 46398fba4d66ad342cf2504ef947b5fb857423b2 branch: master author: kernc committer: GitHub date: 2020-06-11T14:03:29-04:00 summary: bpo-29620: iterate over a copy of sys.modules (GH-4800) unittest.TestCase.assertWarns no longer raises a RuntimeException when accessing a module's ``__warningregistry__`` causes importation of a new module, or when a new module is imported in another thread. Patch by Kernc. files: A Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst M Lib/unittest/case.py M Lib/unittest/test/test_case.py diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index f8bc865ee8203..52eb7d05ed143 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -252,7 +252,7 @@ class _AssertWarnsContext(_AssertRaisesBaseContext): def __enter__(self): # The __warningregistry__'s need to be in a pristine state for tests # to work properly. 
- for v in sys.modules.values(): + for v in list(sys.modules.values()): if getattr(v, '__warningregistry__', None): v.__warningregistry__ = {} self.warnings_manager = warnings.catch_warnings(record=True) diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py index f855c4dc00b31..3dedcbe6aad5f 100644 --- a/Lib/unittest/test/test_case.py +++ b/Lib/unittest/test/test_case.py @@ -8,6 +8,7 @@ import warnings import weakref import inspect +import types from copy import deepcopy from test import support @@ -1350,6 +1351,20 @@ class MyWarn(Warning): pass self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True) + def testAssertWarnsModifySysModules(self): + # bpo-29620: handle modified sys.modules during iteration + class Foo(types.ModuleType): + @property + def __warningregistry__(self): + sys.modules['@bar@'] = 'bar' + + sys.modules['@foo@'] = Foo('foo') + try: + self.assertWarns(UserWarning, warnings.warn, 'expected') + finally: + del sys.modules['@foo@'] + del sys.modules['@bar@'] + def testAssertRaisesRegexMismatch(self): def Stub(): raise Exception('Unexpected') diff --git a/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst new file mode 100644 index 0000000000000..d781919504e68 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst @@ -0,0 +1,3 @@ +:func:`~unittest.TestCase.assertWarns` no longer raises a ``RuntimeException`` +when accessing a module's ``__warningregistry__`` causes importation of a new +module, or when a new module is imported in another thread. Patch by Kernc. From webhook-mailer at python.org Thu Jun 11 14:31:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Jun 2020 18:31:56 -0000 Subject: [Python-checkins] bpo-29620: iterate over a copy of sys.modules (GH-4800) (GH-20816) Message-ID: https://github.com/python/cpython/commit/3e499cda47afe2282ca3f1d04151e2c86f2e7e09 commit: 3e499cda47afe2282ca3f1d04151e2c86f2e7e09 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-11T14:31:46-04:00 summary: bpo-29620: iterate over a copy of sys.modules (GH-4800) (GH-20816) unittest.TestCase.assertWarns no longer raises a RuntimeException when accessing a module's ``__warningregistry__`` causes importation of a new module, or when a new module is imported in another thread. (cherry picked from commit 46398fba4d66ad342cf2504ef947b5fb857423b2) Co-authored-by: kernc files: A Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst M Lib/unittest/case.py M Lib/unittest/test/test_case.py diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index e5734b6b7a298..3223c0bff6fa2 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -251,7 +251,7 @@ class _AssertWarnsContext(_AssertRaisesBaseContext): def __enter__(self): # The __warningregistry__'s need to be in a pristine state for tests # to work properly. 
- for v in sys.modules.values(): + for v in list(sys.modules.values()): if getattr(v, '__warningregistry__', None): v.__warningregistry__ = {} self.warnings_manager = warnings.catch_warnings(record=True) diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py index f855c4dc00b31..3dedcbe6aad5f 100644 --- a/Lib/unittest/test/test_case.py +++ b/Lib/unittest/test/test_case.py @@ -8,6 +8,7 @@ import warnings import weakref import inspect +import types from copy import deepcopy from test import support @@ -1350,6 +1351,20 @@ class MyWarn(Warning): pass self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True) + def testAssertWarnsModifySysModules(self): + # bpo-29620: handle modified sys.modules during iteration + class Foo(types.ModuleType): + @property + def __warningregistry__(self): + sys.modules['@bar@'] = 'bar' + + sys.modules['@foo@'] = Foo('foo') + try: + self.assertWarns(UserWarning, warnings.warn, 'expected') + finally: + del sys.modules['@foo@'] + del sys.modules['@bar@'] + def testAssertRaisesRegexMismatch(self): def Stub(): raise Exception('Unexpected') diff --git a/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst new file mode 100644 index 0000000000000..d781919504e68 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst @@ -0,0 +1,3 @@ +:func:`~unittest.TestCase.assertWarns` no longer raises a ``RuntimeException`` +when accessing a module's ``__warningregistry__`` causes importation of a new +module, or when a new module is imported in another thread. Patch by Kernc. From webhook-mailer at python.org Thu Jun 11 14:34:50 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Jun 2020 18:34:50 -0000 Subject: [Python-checkins] bpo-29620: iterate over a copy of sys.modules (GH-4800) (GH-20817) Message-ID: https://github.com/python/cpython/commit/f881c869753fb2b1b7aef353416893190251c539 commit: f881c869753fb2b1b7aef353416893190251c539 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-11T14:34:42-04:00 summary: bpo-29620: iterate over a copy of sys.modules (GH-4800) (GH-20817) unittest.TestCase.assertWarns no longer raises a RuntimeException when accessing a module's ``__warningregistry__`` causes importation of a new module, or when a new module is imported in another thread. (cherry picked from commit 46398fba4d66ad342cf2504ef947b5fb857423b2) Co-authored-by: kernc files: A Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst M Lib/unittest/case.py M Lib/unittest/test/test_case.py diff --git a/Lib/unittest/case.py b/Lib/unittest/case.py index 24af29057646a..d65ff1c5fbab8 100644 --- a/Lib/unittest/case.py +++ b/Lib/unittest/case.py @@ -227,7 +227,7 @@ class _AssertWarnsContext(_AssertRaisesBaseContext): def __enter__(self): # The __warningregistry__'s need to be in a pristine state for tests # to work properly. 
- for v in sys.modules.values(): + for v in list(sys.modules.values()): if getattr(v, '__warningregistry__', None): v.__warningregistry__ = {} self.warnings_manager = warnings.catch_warnings(record=True) diff --git a/Lib/unittest/test/test_case.py b/Lib/unittest/test/test_case.py index 4fac8d5974528..ff541f8cf0512 100644 --- a/Lib/unittest/test/test_case.py +++ b/Lib/unittest/test/test_case.py @@ -8,6 +8,7 @@ import warnings import weakref import inspect +import types from copy import deepcopy from test import support @@ -1352,6 +1353,20 @@ class MyWarn(Warning): pass self.assertRaises(TypeError, self.assertWarnsRegex, MyWarn, lambda: True) + def testAssertWarnsModifySysModules(self): + # bpo-29620: handle modified sys.modules during iteration + class Foo(types.ModuleType): + @property + def __warningregistry__(self): + sys.modules['@bar@'] = 'bar' + + sys.modules['@foo@'] = Foo('foo') + try: + self.assertWarns(UserWarning, warnings.warn, 'expected') + finally: + del sys.modules['@foo@'] + del sys.modules['@bar@'] + def testAssertRaisesRegexMismatch(self): def Stub(): raise Exception('Unexpected') diff --git a/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst new file mode 100644 index 0000000000000..d781919504e68 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst @@ -0,0 +1,3 @@ +:func:`~unittest.TestCase.assertWarns` no longer raises a ``RuntimeException`` +when accessing a module's ``__warningregistry__`` causes importation of a new +module, or when a new module is imported in another thread. Patch by Kernc. From webhook-mailer at python.org Thu Jun 11 15:04:22 2020 From: webhook-mailer at python.org (MARK SCHWAB) Date: Thu, 11 Jun 2020 19:04:22 -0000 Subject: [Python-checkins] bpo-40626: Support HDF5 in mimetypes (GH-20042) Message-ID: https://github.com/python/cpython/commit/60c2a810e37994fc640c58d0ef45b6843354b770 commit: 60c2a810e37994fc640c58d0ef45b6843354b770 branch: master author: MARK SCHWAB <32745414+schwabm at users.noreply.github.com> committer: GitHub date: 2020-06-11T15:04:13-04:00 summary: bpo-40626: Support HDF5 in mimetypes (GH-20042) Add hdf5 with .h5 file extension See 'Recommendations' section for mime types from the HDF group: https://www.hdfgroup.org/2018/06/citations-for-hdf-data-and-software/ Patch by Mark Schwab. files: A Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst M Lib/mimetypes.py diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index e972ca2e291a0..61bfff1635911 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -448,6 +448,7 @@ def _default_mime_types(): '.dvi' : 'application/x-dvi', '.gtar' : 'application/x-gtar', '.hdf' : 'application/x-hdf', + '.h5' : 'application/x-hdf5', '.latex' : 'application/x-latex', '.mif' : 'application/x-mif', '.cdf' : 'application/x-netcdf', diff --git a/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst b/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst new file mode 100644 index 0000000000000..fe652cd7ee39d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst @@ -0,0 +1 @@ +Add h5 file extension as MIME Type application/x-hdf5, as per HDF Group recommendation for HDF5 formatted data files. Patch contributed by Mark Schwab. 
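A quick illustration of the effect of the mimetypes change above. This is an editor's sketch, not part of the archived patch: the filename is made up for the example, and the shown result assumes an interpreter built from a checkout containing this commit, with no conflicting entry picked up from a system mime.types file.

    >>> import mimetypes
    >>> # "data.h5" is a hypothetical filename used only for illustration
    >>> mimetypes.guess_type("data.h5")
    ('application/x-hdf5', None)
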
From webhook-mailer at python.org Thu Jun 11 15:32:47 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Jun 2020 19:32:47 -0000 Subject: [Python-checkins] bpo-40626: Support HDF5 in mimetypes (GH-20042) Message-ID: https://github.com/python/cpython/commit/a3d6d23a923d62a713b9e4536d5f7ce7313d99c7 commit: a3d6d23a923d62a713b9e4536d5f7ce7313d99c7 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-11T12:32:37-07:00 summary: bpo-40626: Support HDF5 in mimetypes (GH-20042) Add hdf5 with .h5 file extension See 'Recommendations' section for mime types from the HDF group: https://www.hdfgroup.org/2018/06/citations-for-hdf-data-and-software/ Patch by Mark Schwab. (cherry picked from commit 60c2a810e37994fc640c58d0ef45b6843354b770) Co-authored-by: MARK SCHWAB <32745414+schwabm at users.noreply.github.com> files: A Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst M Lib/mimetypes.py diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index f33b658f10e5e..434f5b37c5821 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -447,6 +447,7 @@ def _default_mime_types(): '.dvi' : 'application/x-dvi', '.gtar' : 'application/x-gtar', '.hdf' : 'application/x-hdf', + '.h5' : 'application/x-hdf5', '.latex' : 'application/x-latex', '.mif' : 'application/x-mif', '.cdf' : 'application/x-netcdf', diff --git a/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst b/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst new file mode 100644 index 0000000000000..fe652cd7ee39d --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst @@ -0,0 +1 @@ +Add h5 file extension as MIME Type application/x-hdf5, as per HDF Group recommendation for HDF5 formatted data files. Patch contributed by Mark Schwab. From webhook-mailer at python.org Thu Jun 11 17:48:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 11 Jun 2020 21:48:56 -0000 Subject: [Python-checkins] bpo-40025: Require _generate_next_value_ to be defined before members (GH-19762) Message-ID: https://github.com/python/cpython/commit/ebd44003c9e206755e5e28716242ed8941495a62 commit: ebd44003c9e206755e5e28716242ed8941495a62 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-11T14:48:47-07:00 summary: bpo-40025: Require _generate_next_value_ to be defined before members (GH-19762) require `_generate_next_value_` to be defined before members (cherry picked from commit d9a43e20facdf4ad10186f820601c6580e1baa80) files: A Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst M Doc/library/enum.rst M Lib/enum.py M Lib/test/test_enum.py M Misc/ACKS diff --git a/Doc/library/enum.rst b/Doc/library/enum.rst index 38221199dcdb7..e340f3ba643e7 100644 --- a/Doc/library/enum.rst +++ b/Doc/library/enum.rst @@ -273,6 +273,10 @@ overridden:: the next :class:`int` in sequence with the last :class:`int` provided, but the way it does this is an implementation detail and may change. +.. note:: + + The :meth:`_generate_next_value_` method must be defined before any members. 
+ Iteration --------- diff --git a/Lib/enum.py b/Lib/enum.py index d0b03b4aa56ce..83e6410107ce0 100644 --- a/Lib/enum.py +++ b/Lib/enum.py @@ -66,6 +66,7 @@ def __init__(self): self._member_names = [] self._last_values = [] self._ignore = [] + self._auto_called = False def __setitem__(self, key, value): """Changes anything not dundered or not a descriptor. @@ -83,6 +84,9 @@ def __setitem__(self, key, value): ): raise ValueError('_names_ are reserved for future Enum use') if key == '_generate_next_value_': + # check if members already defined as auto() + if self._auto_called: + raise TypeError("_generate_next_value_ must be defined before members") setattr(self, '_generate_next_value', value) elif key == '_ignore_': if isinstance(value, str): @@ -106,6 +110,7 @@ def __setitem__(self, key, value): # enum overwriting a descriptor? raise TypeError('%r already defined as: %r' % (key, self[key])) if isinstance(value, auto): + self._auto_called = True if value.value == _auto_null: value.value = self._generate_next_value(key, 1, len(self._member_names), self._last_values[:]) value = value.value diff --git a/Lib/test/test_enum.py b/Lib/test/test_enum.py index 29a429ccd998f..d9260f4cb475f 100644 --- a/Lib/test/test_enum.py +++ b/Lib/test/test_enum.py @@ -1710,6 +1710,16 @@ class Color(Enum): self.assertEqual(Color.blue.value, 2) self.assertEqual(Color.green.value, 3) + def test_auto_order(self): + with self.assertRaises(TypeError): + class Color(Enum): + red = auto() + green = auto() + blue = auto() + def _generate_next_value_(name, start, count, last): + return name + + def test_duplicate_auto(self): class Dupes(Enum): first = primero = auto() diff --git a/Misc/ACKS b/Misc/ACKS index ce269edbd51bf..a6da4f1289828 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1185,6 +1185,7 @@ Adam Olsen Bryan Olson Grant Olson Koray Oner +Ethan Onstott Piet van Oostrum Tomas Oppelstrup Jason Orendorff diff --git a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst new file mode 100644 index 0000000000000..7b699de4e0726 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst @@ -0,0 +1 @@ +Raise TypeError when _generate_next_value_ is defined after members. Patch by Ethan Onstott. 
\ No newline at end of file From webhook-mailer at python.org Thu Jun 11 19:51:53 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Thu, 11 Jun 2020 23:51:53 -0000 Subject: [Python-checkins] Remove unnecessary grammar decorations and change header (GH-20819) Message-ID: https://github.com/python/cpython/commit/b4282dd15079ed46edc9d382b21422320a0af94f commit: b4282dd15079ed46edc9d382b21422320a0af94f branch: master author: Pablo Galindo committer: GitHub date: 2020-06-12T00:51:44+01:00 summary: Remove unnecessary grammar decorations and change header (GH-20819) files: M Grammar/python.gram diff --git a/Grammar/python.gram b/Grammar/python.gram index 745c14ebb9803..0dfbeb9598d70 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -1,6 +1,5 @@ -# Simplified grammar for Python +# PEG grammar for Python - at bytecode True @trailer ''' void * _PyPegen_parse(Parser *p) From webhook-mailer at python.org Thu Jun 11 20:55:42 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Fri, 12 Jun 2020 00:55:42 -0000 Subject: [Python-checkins] bpo-40939: Clean and adapt the peg_generator directory after deleting the old parser (GH-20822) Message-ID: https://github.com/python/cpython/commit/756180b4bfa09bb77394a2b3754d331181d4f28c commit: 756180b4bfa09bb77394a2b3754d331181d4f28c branch: master author: Pablo Galindo committer: GitHub date: 2020-06-12T01:55:35+01:00 summary: bpo-40939: Clean and adapt the peg_generator directory after deleting the old parser (GH-20822) files: D Tools/peg_generator/scripts/show_parse.py M Tools/peg_generator/Makefile M Tools/peg_generator/pegen/keywordgen.py M Tools/peg_generator/scripts/benchmark.py M Tools/peg_generator/scripts/find_max_nesting.py M Tools/peg_generator/scripts/test_parse_directory.py diff --git a/Tools/peg_generator/Makefile b/Tools/peg_generator/Makefile index fb727c048b311..6ad9c91b985cb 100644 --- a/Tools/peg_generator/Makefile +++ b/Tools/peg_generator/Makefile @@ -22,7 +22,7 @@ data/xxl.py: build: peg_extension/parse.c -peg_extension/parse.c: $(GRAMMAR) $(TOKENS) pegen/*.py peg_extension/peg_extension.c ../../Parser/pegen/pegen.c ../../Parser/pegen/parse_string.c ../../Parser/pegen/*.h pegen/grammar_parser.py +peg_extension/parse.c: $(GRAMMAR) $(TOKENS) pegen/*.py peg_extension/peg_extension.c ../../Parser/pegen.c ../../Parser/string_parser.c ../../Parser/*.h pegen/grammar_parser.py $(PYTHON) -m pegen -q c $(GRAMMAR) $(TOKENS) -o peg_extension/parse.c --compile-extension clean: @@ -70,18 +70,10 @@ stats: peg_extension/parse.c data/xxl.py time: time_compile time_compile: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=new --target=xxl compile + $(VENVPYTHON) scripts/benchmark.py --target=xxl compile time_parse: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=new --target=xxl parse - -time_old: time_old_compile - -time_old_compile: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=old --target=xxl compile - -time_old_parse: venv data/xxl.py - $(VENVPYTHON) scripts/benchmark.py --parser=old --target=xxl parse + $(VENVPYTHON) scripts/benchmark.py --target=xxl parse time_peg_dir: venv $(VENVPYTHON) scripts/test_parse_directory.py \ diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py index 279c34b6dae5b..8684944096654 100644 --- a/Tools/peg_generator/pegen/keywordgen.py +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -41,9 +41,7 @@ def main(): "grammar", type=str, help="The file with the grammar definition in PEG format" ) parser.add_argument( - 
"tokens_file", - type=argparse.FileType("r"), - help="The file with the token definitions" + "tokens_file", type=argparse.FileType("r"), help="The file with the token definitions" ) parser.add_argument( "keyword_file", @@ -61,9 +59,7 @@ def main(): gen.collect_todo() with args.keyword_file as thefile: - all_keywords = sorted( - list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS - ) + all_keywords = sorted(list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS) keywords = ",\n ".join(map(repr, all_keywords)) thefile.write(TEMPLATE.format(keywords=keywords)) diff --git a/Tools/peg_generator/scripts/benchmark.py b/Tools/peg_generator/scripts/benchmark.py index af356bed78391..5fbedaa3b0ed1 100644 --- a/Tools/peg_generator/scripts/benchmark.py +++ b/Tools/peg_generator/scripts/benchmark.py @@ -6,13 +6,13 @@ import os from time import time -import _peg_parser - try: import memory_profiler except ModuleNotFoundError: - print("Please run `make venv` to create a virtual environment and install" - " all the dependencies, before running this script.") + print( + "Please run `make venv` to create a virtual environment and install" + " all the dependencies, before running this script." + ) sys.exit(1) sys.path.insert(0, os.getcwd()) @@ -21,13 +21,6 @@ argparser = argparse.ArgumentParser( prog="benchmark", description="Reproduce the various pegen benchmarks" ) -argparser.add_argument( - "--parser", - action="store", - choices=["new", "old"], - default="pegen", - help="Which parser to benchmark (default is pegen)", -) argparser.add_argument( "--target", action="store", @@ -40,12 +33,7 @@ command_compile = subcommands.add_parser( "compile", help="Benchmark parsing and compiling to bytecode" ) -command_parse = subcommands.add_parser( - "parse", help="Benchmark parsing and generating an ast.AST" -) -command_notree = subcommands.add_parser( - "notree", help="Benchmark parsing and dumping the tree" -) +command_parse = subcommands.add_parser("parse", help="Benchmark parsing and generating an ast.AST") def benchmark(func): @@ -66,59 +54,37 @@ def wrapper(*args): @benchmark -def time_compile(source, parser): - if parser == "old": - return _peg_parser.compile_string( - source, - oldparser=True, - ) - else: - return _peg_parser.compile_string(source) - - - at benchmark -def time_parse(source, parser): - if parser == "old": - return _peg_parser.parse_string(source, oldparser=True) - else: - return _peg_parser.parse_string(source) +def time_compile(source): + return compile(source, "", "exec") @benchmark -def time_notree(source, parser): - if parser == "old": - return _peg_parser.parse_string(source, oldparser=True, ast=False) - else: - return _peg_parser.parse_string(source, ast=False) +def time_parse(source): + return ast.parse(source) -def run_benchmark_xxl(subcommand, parser, source): +def run_benchmark_xxl(subcommand, source): if subcommand == "compile": - time_compile(source, parser) + time_compile(source) elif subcommand == "parse": - time_parse(source, parser) - elif subcommand == "notree": - time_notree(source, parser) + time_parse(source) -def run_benchmark_stdlib(subcommand, parser): - modes = {"compile": 2, "parse": 1, "notree": 0} +def run_benchmark_stdlib(subcommand): + modes = {"compile": 2, "parse": 1} for _ in range(3): parse_directory( "../../Lib", verbose=False, excluded_files=["*/bad*", "*/lib2to3/tests/data/*",], - tree_arg=0, short=True, mode=modes[subcommand], - oldparser=(parser == "old"), ) def main(): args = argparser.parse_args() subcommand = args.subcommand - 
parser = args.parser target = args.target if subcommand is None: @@ -127,9 +93,9 @@ def main(): if target == "xxl": with open(os.path.join("data", "xxl.py"), "r") as f: source = f.read() - run_benchmark_xxl(subcommand, parser, source) + run_benchmark_xxl(subcommand, source) elif target == "stdlib": - run_benchmark_stdlib(subcommand, parser) + run_benchmark_stdlib(subcommand) if __name__ == "__main__": diff --git a/Tools/peg_generator/scripts/find_max_nesting.py b/Tools/peg_generator/scripts/find_max_nesting.py index f2fdd00bfb7cd..92045c93ff76d 100755 --- a/Tools/peg_generator/scripts/find_max_nesting.py +++ b/Tools/peg_generator/scripts/find_max_nesting.py @@ -14,8 +14,7 @@ Usage: python -m scripts.find_max_nesting """ import sys - -from _peg_parser import parse_string +import ast GRAMMAR_FILE = "data/python.gram" INITIAL_NESTING_DEPTH = 10 @@ -28,9 +27,8 @@ def check_nested_expr(nesting_depth: int) -> bool: expr = f"{'(' * nesting_depth}0{')' * nesting_depth}" - try: - parse_string(expr) + ast.parse(expr) print(f"Nesting depth of {nesting_depth} is successful") return True except Exception as err: diff --git a/Tools/peg_generator/scripts/show_parse.py b/Tools/peg_generator/scripts/show_parse.py deleted file mode 100755 index b4ee5a1b357f7..0000000000000 --- a/Tools/peg_generator/scripts/show_parse.py +++ /dev/null @@ -1,121 +0,0 @@ -#!/usr/bin/env python3.8 - -"""Show the parse tree for a given program, nicely formatted. - -Example: - -$ scripts/show_parse.py a+b -Module( - body=[ - Expr( - value=BinOp( - left=Name(id="a", ctx=Load()), op=Add(), right=Name(id="b", ctx=Load()) - ) - ) - ], - type_ignores=[], -) -$ - -Use -v to show line numbers and column offsets. - -The formatting is done using black. You can also import this module -and call one of its functions. 
-""" - -import argparse -import ast -import difflib -import os -import sys -import tempfile - -import _peg_parser - -from typing import List - -sys.path.insert(0, os.getcwd()) -from pegen.ast_dump import ast_dump - -parser = argparse.ArgumentParser() -parser.add_argument( - "-d", "--diff", action="store_true", help="show diff between grammar and ast (requires -g)" -) -parser.add_argument( - "-p", - "--parser", - choices=["new", "old"], - default="new", - help="choose the parser to use" -) -parser.add_argument( - "-m", - "--multiline", - action="store_true", - help="concatenate program arguments using newline instead of space", -) -parser.add_argument("-v", "--verbose", action="store_true", help="show line/column numbers") -parser.add_argument("program", nargs="+", help="program to parse (will be concatenated)") - - -def format_tree(tree: ast.AST, verbose: bool = False) -> str: - with tempfile.NamedTemporaryFile("w+") as tf: - tf.write(ast_dump(tree, include_attributes=verbose)) - tf.write("\n") - tf.flush() - cmd = f"black -q {tf.name}" - sts = os.system(cmd) - if sts: - raise RuntimeError(f"Command {cmd!r} failed with status 0x{sts:x}") - tf.seek(0) - return tf.read() - - -def diff_trees(a: ast.AST, b: ast.AST, verbose: bool = False) -> List[str]: - sa = format_tree(a, verbose) - sb = format_tree(b, verbose) - la = sa.splitlines() - lb = sb.splitlines() - return list(difflib.unified_diff(la, lb, "a", "b", lineterm="")) - - -def show_parse(source: str, verbose: bool = False) -> str: - tree = _peg_parser.parse_string(source, oldparser=True) - return format_tree(tree, verbose).rstrip("\n") - - -def print_parse(source: str, verbose: bool = False) -> None: - print(show_parse(source, verbose)) - - -def main() -> None: - args = parser.parse_args() - new_parser = args.parser == "new" - if args.multiline: - sep = "\n" - else: - sep = " " - program = sep.join(args.program) - if new_parser: - tree = _peg_parser.parse_string(program) - - if args.diff: - a = _peg_parser.parse_string(program, oldparser=True) - b = tree - diff = diff_trees(a, b, args.verbose) - if diff: - for line in diff: - print(line) - else: - print("# Trees are the same") - else: - print("# Parsed using the new parser") - print(format_tree(tree, args.verbose)) - else: - tree = _peg_parser.parse_string(program, oldparser=True) - print("# Parsed using the old parser") - print(format_tree(tree, args.verbose)) - - -if __name__ == "__main__": - main() diff --git a/Tools/peg_generator/scripts/test_parse_directory.py b/Tools/peg_generator/scripts/test_parse_directory.py index 63204ce9dc193..d8f4f0ecd3e05 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -7,7 +7,6 @@ import time import traceback import tokenize -import _peg_parser from glob import glob from pathlib import PurePath @@ -16,7 +15,6 @@ sys.path.insert(0, os.getcwd()) from pegen.ast_dump import ast_dump from pegen.testutil import print_memstats -from scripts import show_parse SUCCESS = "\033[92m" FAIL = "\033[91m" @@ -40,9 +38,6 @@ argparser.add_argument( "-v", "--verbose", action="store_true", help="Display detailed errors for failures" ) -argparser.add_argument( - "-t", "--tree", action="count", help="Compare parse tree to official AST", default=0 -) def report_status( @@ -79,66 +74,13 @@ def report_status( print(f" {str(error.__class__.__name__)}: {error}") -def compare_trees( - actual_tree: ast.AST, file: str, verbose: bool, include_attributes: bool = False, -) -> int: - with open(file) as f: - 
expected_tree = _peg_parser.parse_string(f.read(), oldparser=True) - - expected_text = ast_dump(expected_tree, include_attributes=include_attributes) - actual_text = ast_dump(actual_tree, include_attributes=include_attributes) - if actual_text == expected_text: - if verbose: - print("Tree for {file}:") - print(show_parse.format_tree(actual_tree, include_attributes)) - return 0 - - print(f"Diffing ASTs for {file} ...") - - expected = show_parse.format_tree(expected_tree, include_attributes) - actual = show_parse.format_tree(actual_tree, include_attributes) - - if verbose: - print("Expected for {file}:") - print(expected) - print("Actual for {file}:") - print(actual) - print(f"Diff for {file}:") - - diff = show_parse.diff_trees(expected_tree, actual_tree, include_attributes) - for line in diff: - print(line) - - return 1 - - -def parse_file(source: str, file: str, mode: int, oldparser: bool) -> Tuple[Any, float]: +def parse_file(source: str, file: str) -> Tuple[Any, float]: t0 = time.time() - if mode == COMPILE: - result = _peg_parser.compile_string( - source, - filename=file, - oldparser=oldparser, - ) - else: - result = _peg_parser.parse_string( - source, - filename=file, - oldparser=oldparser, - ast=(mode == PARSE), - ) + result = ast.parse(source, filename=file) t1 = time.time() return result, t1 - t0 -def is_parsing_failure(source: str) -> bool: - try: - _peg_parser.parse_string(source, mode="exec", oldparser=True) - except SyntaxError: - return False - return True - - def generate_time_stats(files, total_seconds) -> None: total_files = len(files) total_bytes = 0 @@ -160,27 +102,11 @@ def generate_time_stats(files, total_seconds) -> None: ) -def parse_directory( - directory: str, - verbose: bool, - excluded_files: List[str], - tree_arg: int, - short: bool, - mode: int, - oldparser: bool, -) -> int: - if tree_arg: - assert mode == PARSE, "Mode should be 1 (parse), when comparing the generated trees" - - if oldparser and tree_arg: - print("Cannot specify tree argument with the cpython parser.", file=sys.stderr) - return 1 - +def parse_directory(directory: str, verbose: bool, excluded_files: List[str], short: bool) -> int: # For a given directory, traverse files and attempt to parse each one # - Output success/failure for each file errors = 0 files = [] - trees = {} # Trees to compare (after everything else is done) total_seconds = 0 for file in sorted(glob(f"{directory}/**/*.py", recursive=True)): @@ -192,39 +118,20 @@ def parse_directory( source = f.read() try: - result, dt = parse_file(source, file, mode, oldparser) + result, dt = parse_file(source, file) total_seconds += dt - if tree_arg: - trees[file] = result report_status(succeeded=True, file=file, verbose=verbose, short=short) except SyntaxError as error: - if is_parsing_failure(source): - print(f"File {file} cannot be parsed by either parser.") - else: - report_status( - succeeded=False, file=file, verbose=verbose, error=error, short=short - ) - errors += 1 + report_status(succeeded=False, file=file, verbose=verbose, error=error, short=short) + errors += 1 files.append(file) - t1 = time.time() - generate_time_stats(files, total_seconds) if short: print_memstats() if errors: print(f"Encountered {errors} failures.", file=sys.stderr) - - # Compare trees (the dict is empty unless -t is given) - compare_trees_errors = 0 - for file, tree in trees.items(): - if not short: - print("Comparing ASTs for", file) - if compare_trees(tree, file, verbose, tree_arg >= 2) == 1: - compare_trees_errors += 1 - - if errors or compare_trees_errors: 
return 1 return 0 @@ -235,20 +142,8 @@ def main() -> None: directory = args.directory verbose = args.verbose excluded_files = args.exclude - tree = args.tree short = args.short - mode = 1 if args.tree else 2 - sys.exit( - parse_directory( - directory, - verbose, - excluded_files, - tree, - short, - mode, - oldparser=False, - ) - ) + sys.exit(parse_directory(directory, verbose, excluded_files, short)) if __name__ == "__main__": From webhook-mailer at python.org Thu Jun 11 22:26:10 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Fri, 12 Jun 2020 02:26:10 -0000 Subject: [Python-checkins] bpo-40950: Port nis module to multiphase initialization (GH-20811) Message-ID: https://github.com/python/cpython/commit/ddef3bdc7b254a7e1129a52c17d79cb7c73a88f5 commit: ddef3bdc7b254a7e1129a52c17d79cb7c73a88f5 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-12T11:26:00+09:00 summary: bpo-40950: Port nis module to multiphase initialization (GH-20811) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-12-00-12-28.bpo-40950.tzMy7m.rst M Modules/nismodule.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-00-12-28.bpo-40950.tzMy7m.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-00-12-28.bpo-40950.tzMy7m.rst new file mode 100644 index 0000000000000..925b5790f73f3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-00-12-28.bpo-40950.tzMy7m.rst @@ -0,0 +1,2 @@ +Add a state to the :mod:`nis` module (:pep:`3121`) and apply +the multiphase initialization. Patch by Dong-hee Na. diff --git a/Modules/nismodule.c b/Modules/nismodule.c index a24978e068670..6655451ebd2fb 100644 --- a/Modules/nismodule.c +++ b/Modules/nismodule.c @@ -44,12 +44,42 @@ PyDoc_STRVAR(maps__doc__, Returns an array of all available NIS maps within a domain. 
If domain\n\ is not specified it defaults to the system default domain.\n"); -static PyObject *NisError; +typedef struct { + PyObject *nis_error; +} nis_state; + +static inline nis_state* +get_nis_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (nis_state *)state; +} + +static int +nis_clear(PyObject *m) +{ + Py_CLEAR(get_nis_state(m)->nis_error); + return 0; +} + +static int +nis_traverse(PyObject *m, visitproc visit, void *arg) +{ + Py_VISIT(get_nis_state(m)->nis_error); + return 0; +} + +static void +nis_free(void *m) +{ + nis_clear((PyObject *) m); +} static PyObject * -nis_error (int err) +nis_error(nis_state *state, int err) { - PyErr_SetString(NisError, yperr_string(err)); + PyErr_SetString(state->nis_error, yperr_string(err)); return NULL; } @@ -70,7 +100,7 @@ static struct nis_map { }; static char * -nis_mapname (char *map, int *pfix) +nis_mapname(char *map, int *pfix) { int i; @@ -98,7 +128,7 @@ struct ypcallback_data { }; static int -nis_foreach (int instatus, char *inkey, int inkeylen, char *inval, +nis_foreach(int instatus, char *inkey, int inkeylen, char *inval, int invallen, struct ypcallback_data *indata) { if (instatus == YP_TRUE) { @@ -137,21 +167,22 @@ nis_foreach (int instatus, char *inkey, int inkeylen, char *inval, } static PyObject * -nis_get_default_domain (PyObject *self, PyObject *Py_UNUSED(ignored)) +nis_get_default_domain(PyObject *module, PyObject *Py_UNUSED(ignored)) { char *domain; int err; PyObject *res; - - if ((err = yp_get_default_domain(&domain)) != 0) - return nis_error(err); + nis_state *state = get_nis_state(module); + if ((err = yp_get_default_domain(&domain)) != 0) { + return nis_error(state, err); + } res = PyUnicode_FromStringAndSize (domain, strlen(domain)); return res; } static PyObject * -nis_match (PyObject *self, PyObject *args, PyObject *kwdict) +nis_match(PyObject *module, PyObject *args, PyObject *kwdict) { char *match; char *domain = NULL; @@ -165,18 +196,22 @@ nis_match (PyObject *self, PyObject *args, PyObject *kwdict) if (!PyArg_ParseTupleAndKeywords(args, kwdict, "Us|s:match", kwlist, - &ukey, &map, &domain)) + &ukey, &map, &domain)) { return NULL; - if ((bkey = PyUnicode_EncodeFSDefault(ukey)) == NULL) + } + if ((bkey = PyUnicode_EncodeFSDefault(ukey)) == NULL) { return NULL; + } /* check for embedded null bytes */ if (PyBytes_AsStringAndSize(bkey, &key, &keylen) == -1) { Py_DECREF(bkey); return NULL; } + + nis_state *state = get_nis_state(module); if (!domain && ((err = yp_get_default_domain(&domain)) != 0)) { Py_DECREF(bkey); - return nis_error(err); + return nis_error(state, err); } map = nis_mapname (map, &fix); if (fix) @@ -187,15 +222,16 @@ nis_match (PyObject *self, PyObject *args, PyObject *kwdict) Py_DECREF(bkey); if (fix) len--; - if (err != 0) - return nis_error(err); + if (err != 0) { + return nis_error(state, err); + } res = PyUnicode_DecodeFSDefaultAndSize(match, len); free (match); return res; } static PyObject * -nis_cat (PyObject *self, PyObject *args, PyObject *kwdict) +nis_cat(PyObject *module, PyObject *args, PyObject *kwdict) { char *domain = NULL; char *map; @@ -206,10 +242,13 @@ nis_cat (PyObject *self, PyObject *args, PyObject *kwdict) static char *kwlist[] = {"map", "domain", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwdict, "s|s:cat", - kwlist, &map, &domain)) + kwlist, &map, &domain)) { return NULL; - if (!domain && ((err = yp_get_default_domain(&domain)) != 0)) - return nis_error(err); + } + nis_state *state = get_nis_state(module); + if (!domain 
&& ((err = yp_get_default_domain(&domain)) != 0)) { + return nis_error(state, err); + } dict = PyDict_New (); if (dict == NULL) return NULL; @@ -222,7 +261,7 @@ nis_cat (PyObject *self, PyObject *args, PyObject *kwdict) PyEval_RestoreThread(data.state); if (err != 0) { Py_DECREF(dict); - return nis_error(err); + return nis_error(state, err); } return dict; } @@ -352,7 +391,7 @@ nisproc_maplist_2(domainname *argp, CLIENT *clnt) static nismaplist * -nis_maplist (char *dom) +nis_maplist(nis_state *state, char *dom) { nisresp_maplist *list; CLIENT *cl; @@ -364,12 +403,12 @@ nis_maplist (char *dom) mapi++; } if (!server) { - PyErr_SetString(NisError, "No NIS master found for any map"); + PyErr_SetString(state->nis_error, "No NIS master found for any map"); return NULL; } cl = clnt_create(server, YPPROG, YPVERS, "tcp"); if (cl == NULL) { - PyErr_SetString(NisError, clnt_spcreateerror(server)); + PyErr_SetString(state->nis_error, clnt_spcreateerror(server)); goto finally; } list = nisproc_maplist_2 (&dom, cl); @@ -388,7 +427,7 @@ nis_maplist (char *dom) } static PyObject * -nis_maps (PyObject *self, PyObject *args, PyObject *kwdict) +nis_maps (PyObject *module, PyObject *args, PyObject *kwdict) { char *domain = NULL; nismaplist *maps; @@ -397,17 +436,22 @@ nis_maps (PyObject *self, PyObject *args, PyObject *kwdict) static char *kwlist[] = {"domain", NULL}; if (!PyArg_ParseTupleAndKeywords(args, kwdict, - "|s:maps", kwlist, &domain)) + "|s:maps", kwlist, &domain)) { return NULL; + } + + nis_state *state = get_nis_state(module); if (!domain && ((err = yp_get_default_domain (&domain)) != 0)) { - nis_error(err); + nis_error(state, err); return NULL; } - if ((maps = nis_maplist (domain)) == NULL) + if ((maps = nis_maplist(state, domain)) == NULL) { return NULL; - if ((list = PyList_New(0)) == NULL) + } + if ((list = PyList_New(0)) == NULL) { return NULL; + } for (; maps; maps = maps->next) { PyObject *str = PyUnicode_FromString(maps->map); if (!str || PyList_Append(list, str) < 0) @@ -439,31 +483,45 @@ static PyMethodDef nis_methods[] = { {NULL, NULL} /* Sentinel */ }; +static int +nis_exec(PyObject *module) +{ + nis_state* state = get_nis_state(module); + state->nis_error = PyErr_NewException("nis.error", NULL, NULL); + if (state->nis_error == NULL) { + return -1; + } + + Py_INCREF(state->nis_error); + if (PyModule_AddObject(module, "error", state->nis_error) < 0) { + Py_DECREF(state->nis_error); + return -1; + } + return 0; +} + +static PyModuleDef_Slot nis_slots[] = { + {Py_mod_exec, nis_exec}, + {0, NULL} +}; + PyDoc_STRVAR(nis__doc__, "This module contains functions for accessing NIS maps.\n"); static struct PyModuleDef nismodule = { PyModuleDef_HEAD_INIT, - "nis", - nis__doc__, - -1, - nis_methods, - NULL, - NULL, - NULL, - NULL + .m_name = "nis", + .m_doc = nis__doc__, + .m_size = sizeof(nis_state), + .m_methods = nis_methods, + .m_traverse = nis_traverse, + .m_clear = nis_clear, + .m_free = nis_free, + .m_slots = nis_slots, }; PyMODINIT_FUNC PyInit_nis(void) { - PyObject *m, *d; - m = PyModule_Create(&nismodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - NisError = PyErr_NewException("nis.error", NULL, NULL); - if (NisError != NULL) - PyDict_SetItemString(d, "error", NisError); - return m; + return PyModuleDef_Init(&nismodule); } From webhook-mailer at python.org Fri Jun 12 02:20:21 2020 From: webhook-mailer at python.org (native-api) Date: Fri, 12 Jun 2020 06:20:21 -0000 Subject: [Python-checkins] bpo-33944: site: Add site-packages tracing in verbose mode (GH-12110) 
Message-ID: https://github.com/python/cpython/commit/2145c8c9724287a310bc77a2760d4f1c0ca9eb0c commit: 2145c8c9724287a310bc77a2760d4f1c0ca9eb0c branch: master author: native-api committer: GitHub date: 2020-06-12T15:20:11+09:00 summary: bpo-33944: site: Add site-packages tracing in verbose mode (GH-12110) files: A Misc/NEWS.d/next/Library/2019-03-01-01-56-23.bpo-33944.-82Pkt.rst M Doc/using/cmdline.rst M Lib/site.py M Lib/test/test_site.py diff --git a/Doc/using/cmdline.rst b/Doc/using/cmdline.rst index 7aacd8ffe822e..8c65d99ef31f9 100644 --- a/Doc/using/cmdline.rst +++ b/Doc/using/cmdline.rst @@ -369,6 +369,11 @@ Miscellaneous options (filename or built-in module) from which it is loaded. When given twice (:option:`!-vv`), print a message for each file that is checked for when searching for a module. Also provides information on module cleanup at exit. + + .. versionchanged:: 3.10 + The :mod:`site` module reports the site-specific paths + and :file:`.pth` files being processed. + See also :envvar:`PYTHONVERBOSE`. diff --git a/Lib/site.py b/Lib/site.py index e981a142088fd..544306cd40e32 100644 --- a/Lib/site.py +++ b/Lib/site.py @@ -88,6 +88,11 @@ USER_BASE = None +def _trace(message): + if sys.flags.verbose: + print(message, file=sys.stderr) + + def makepath(*paths): dir = os.path.join(*paths) try: @@ -156,6 +161,7 @@ def addpackage(sitedir, name, known_paths): else: reset = False fullname = os.path.join(sitedir, name) + _trace(f"Processing .pth file: {fullname!r}") try: f = io.TextIOWrapper(io.open_code(fullname)) except OSError: @@ -190,6 +196,7 @@ def addpackage(sitedir, name, known_paths): def addsitedir(sitedir, known_paths=None): """Add 'sitedir' argument to sys.path if missing and handle .pth files in 'sitedir'""" + _trace(f"Adding directory: {sitedir!r}") if known_paths is None: known_paths = _init_pathinfo() reset = True @@ -310,6 +317,7 @@ def addusersitepackages(known_paths): """ # get the per user site-package path # this call will also make sure USER_BASE and USER_SITE are set + _trace("Processing user site-packages") user_site = getusersitepackages() if ENABLE_USER_SITE and os.path.isdir(user_site): @@ -354,6 +362,7 @@ def getsitepackages(prefixes=None): def addsitepackages(known_paths, prefixes=None): """Add site-packages to sys.path""" + _trace("Processing global site-packages") for sitedir in getsitepackages(prefixes): if os.path.isdir(sitedir): addsitedir(sitedir, known_paths) diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index 957e7a41d5466..9f4a8bc64f7ee 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -13,6 +13,7 @@ import builtins import encodings import glob +import io import os import re import shutil @@ -320,6 +321,14 @@ def test_no_home_directory(self): mock_addsitedir.assert_not_called() self.assertFalse(known_paths) + def test_trace(self): + message = "bla-bla-bla" + for verbose, out in (True, message + "\n"), (False, ""): + with mock.patch('sys.flags', mock.Mock(verbose=verbose)), \ + mock.patch('sys.stderr', io.StringIO()): + site._trace(message) + self.assertEqual(sys.stderr.getvalue(), out) + class PthFile(object): """Helper class for handling testing of .pth files""" diff --git a/Misc/NEWS.d/next/Library/2019-03-01-01-56-23.bpo-33944.-82Pkt.rst b/Misc/NEWS.d/next/Library/2019-03-01-01-56-23.bpo-33944.-82Pkt.rst new file mode 100644 index 0000000000000..b0c953dd6752e --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-03-01-01-56-23.bpo-33944.-82Pkt.rst @@ -0,0 +1 @@ +Added site.py site-packages tracing in verbose mode. 
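The tracing added in this change is only visible when the interpreter runs in verbose mode, since the new site._trace() helper checks sys.flags.verbose before writing to sys.stderr. A minimal sketch of how to observe it, assuming an interpreter that already contains this change (the directory names in the output depend on the installation, so none are reproduced here):

    import subprocess
    import sys

    # Start a fresh interpreter in verbose mode and keep only the messages
    # written by site._trace(): "Processing user site-packages",
    # "Processing global site-packages", "Adding directory: ..." and
    # "Processing .pth file: ...".
    proc = subprocess.run(
        [sys.executable, "-v", "-c", "pass"],
        capture_output=True,
        text=True,
    )
    for line in proc.stderr.splitlines():
        if line.startswith(("Adding directory", "Processing")):
            print(line)
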
From webhook-mailer at python.org Fri Jun 12 10:46:52 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Fri, 12 Jun 2020 14:46:52 -0000 Subject: [Python-checkins] bpo-39035: travis: Update image to xenial (#17623) Message-ID: https://github.com/python/cpython/commit/f88b578949a034f511dd1b4c1c161351b3ee0db8 commit: f88b578949a034f511dd1b4c1c161351b3ee0db8 branch: 3.5 author: Inada Naoki committer: GitHub date: 2020-06-12T07:46:40-07:00 summary: bpo-39035: travis: Update image to xenial (#17623) Use image same to master to ease maintainance. Remove "group: beta" to make Travis more stable. files: M .travis.yml diff --git a/.travis.yml b/.travis.yml index 20912d5440be8..aee8875de1639 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,7 +1,5 @@ language: c -dist: trusty -sudo: false -group: beta +dist: xenial # To cache doc-building dependencies. cache: pip From webhook-mailer at python.org Fri Jun 12 11:33:27 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 12 Jun 2020 15:33:27 -0000 Subject: [Python-checkins] bpo-39073: validate Address parts to disallow CRLF (#19007) (#20450) Message-ID: https://github.com/python/cpython/commit/f91a0b6df14d6c5133fe3d5889fad7d84fc0c046 commit: f91a0b6df14d6c5133fe3d5889fad7d84fc0c046 branch: 3.5 author: Victor Stinner committer: GitHub date: 2020-06-12T08:33:19-07:00 summary: bpo-39073: validate Address parts to disallow CRLF (#19007) (#20450) Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. (cherry picked from commit 614f17211c5fc0e5b828be1d3320661d1038fe8f) files: A Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst M Lib/email/headerregistry.py M Lib/test/test_email/test_headerregistry.py diff --git a/Lib/email/headerregistry.py b/Lib/email/headerregistry.py index 0fc2231e5cbd2..fafd489b2c370 100644 --- a/Lib/email/headerregistry.py +++ b/Lib/email/headerregistry.py @@ -31,6 +31,11 @@ def __init__(self, display_name='', username='', domain='', addr_spec=None): without any Content Transfer Encoding. """ + + inputs = ''.join(filter(None, (display_name, username, domain, addr_spec))) + if '\r' in inputs or '\n' in inputs: + raise ValueError("invalid arguments; address parts cannot contain CR or LF") + # This clause with its potential 'raise' may only happen when an # application program creates an Address object using an addr_spec # keyword. 
The email library code itself must always supply username diff --git a/Lib/test/test_email/test_headerregistry.py b/Lib/test/test_email/test_headerregistry.py index 55ecdea9aacb5..d6f8119b02f3f 100644 --- a/Lib/test/test_email/test_headerregistry.py +++ b/Lib/test/test_email/test_headerregistry.py @@ -1415,6 +1415,25 @@ def test_il8n(self): # with self.assertRaises(ValueError): # Address('foo', 'w?k', 'example.com') + def test_crlf_in_constructor_args_raises(self): + cases = ( + dict(display_name='foo\r'), + dict(display_name='foo\n'), + dict(display_name='foo\r\n'), + dict(domain='example.com\r'), + dict(domain='example.com\n'), + dict(domain='example.com\r\n'), + dict(username='wok\r'), + dict(username='wok\n'), + dict(username='wok\r\n'), + dict(addr_spec='wok at example.com\r'), + dict(addr_spec='wok at example.com\n'), + dict(addr_spec='wok at example.com\r\n') + ) + for kwargs in cases: + with self.subTest(kwargs=kwargs), self.assertRaisesRegex(ValueError, "invalid arguments"): + Address(**kwargs) + def test_non_ascii_username_in_addr_spec_raises(self): with self.assertRaises(ValueError): Address('foo', addr_spec='w?k at example.com') diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst new file mode 100644 index 0000000000000..6c9447b897bf6 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst @@ -0,0 +1 @@ +Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. From webhook-mailer at python.org Fri Jun 12 12:18:52 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 12 Jun 2020 16:18:52 -0000 Subject: [Python-checkins] bpo-40955: Fix memory leak in subprocess module (GH-20825) Message-ID: https://github.com/python/cpython/commit/0d3350daa8123a3e16d4a534b6e873eb12c10d7c commit: 0d3350daa8123a3e16d4a534b6e873eb12c10d7c branch: master author: Christian Heimes committer: GitHub date: 2020-06-12T09:18:43-07:00 summary: bpo-40955: Fix memory leak in subprocess module (GH-20825) ``` Direct leak of 8 byte(s) in 1 object(s) allocated from: #0 0x7f008bf19667 in __interceptor_malloc (/lib64/libasan.so.6+0xb0667) #1 0x7f007a0bee4a in subprocess_fork_exec /home/heimes/dev/python/cpython/Modules/_posixsubprocess.c:774 #2 0xe0305b in cfunction_call Objects/methodobject.c:546 ``` Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Library/2020-06-12-11-55-30.bpo-40955.huixCg.rst M Modules/_posixsubprocess.c diff --git a/Misc/NEWS.d/next/Library/2020-06-12-11-55-30.bpo-40955.huixCg.rst b/Misc/NEWS.d/next/Library/2020-06-12-11-55-30.bpo-40955.huixCg.rst new file mode 100644 index 0000000000000..9a9803044ec96 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-12-11-55-30.bpo-40955.huixCg.rst @@ -0,0 +1 @@ +Fix a minor memory leak in :mod:`subprocess` module when extra_groups was specified. 
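The buffer reported leaking in the ASAN trace above is the temporary gid array that subprocess_fork_exec() builds whenever the Python-level extra_groups argument is supplied; the change below releases it with PyMem_RawFree() alongside the existing cleanup. A minimal sketch of the call that exercises that allocation, assuming a POSIX system and Python 3.9+ (the group IDs are placeholders, and changing supplementary groups normally requires privileges, which is why a PermissionError is tolerated; the parent-side allocation, and hence the former leak, happened either way):

    import subprocess

    try:
        # Passing extra_groups makes the parent build the gid array that
        # this change now frees with PyMem_RawFree().
        subprocess.run(["true"], extra_groups=[1000, 1001])
    except PermissionError:
        # Expected for unprivileged users: the child's setgroups() call fails.
        pass
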
diff --git a/Modules/_posixsubprocess.c b/Modules/_posixsubprocess.c index add2962189b1c..5d1691ace4192 100644 --- a/Modules/_posixsubprocess.c +++ b/Modules/_posixsubprocess.c @@ -893,6 +893,7 @@ subprocess_fork_exec(PyObject* self, PyObject *args) if (_enable_gc(need_to_reenable_gc, gc_module)) { pid = -1; } + PyMem_RawFree(groups); Py_XDECREF(preexec_fn_args_tuple); Py_XDECREF(gc_module); From webhook-mailer at python.org Fri Jun 12 13:19:29 2020 From: webhook-mailer at python.org (Dennis Sweeney) Date: Fri, 12 Jun 2020 17:19:29 -0000 Subject: [Python-checkins] bpo-40890: Add `mapping` property to dict views (GH-20749) Message-ID: https://github.com/python/cpython/commit/3ee0e48b0376a710c08eec6f30e4181563b192a2 commit: 3ee0e48b0376a710c08eec6f30e4181563b192a2 branch: master author: Dennis Sweeney <36520290+sweeneyde at users.noreply.github.com> committer: GitHub date: 2020-06-12T10:19:25-07:00 summary: bpo-40890: Add `mapping` property to dict views (GH-20749) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-09-00-20-13.bpo-40890.LoRV-g.rst M Doc/library/stdtypes.rst M Doc/whatsnew/3.10.rst M Lib/test/test_dict.py M Objects/dictobject.c diff --git a/Doc/library/stdtypes.rst b/Doc/library/stdtypes.rst index 2082b849fd05b..7028d240c59eb 100644 --- a/Doc/library/stdtypes.rst +++ b/Doc/library/stdtypes.rst @@ -4622,6 +4622,12 @@ support membership tests: .. versionchanged:: 3.8 Dictionary views are now reversible. +.. describe:: dictview.mapping + + Return a :class:`types.MappingProxyType` that wraps the original + dictionary to which the view refers. + + .. versionadded:: 3.10 Keys views are set-like since their entries are unique and hashable. If all values are hashable, so that ``(key, value)`` pairs are unique and hashable, @@ -4661,6 +4667,12 @@ An example of dictionary view usage:: >>> keys ^ {'sausage', 'juice'} {'juice', 'sausage', 'bacon', 'spam'} + >>> # get back a read-only proxy for the original dictionary + >>> values.mapping + mappingproxy({'eggs': 2, 'sausage': 1, 'bacon': 1, 'spam': 500}) + >>> values.mapping['spam'] + 500 + .. _typecontextmanager: diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 1234b2e6bbf27..629909b79e2aa 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -74,6 +74,11 @@ New Features number of ones in the binary expansion of a given integer, also known as the population count. (Contributed by Niklas Fiekas in :issue:`29882`.) +* The views returned by :meth:`dict.keys`, :meth:`dict.values` and + :meth:`dict.items` now all have a ``mapping`` attribute that gives a + :class:`types.MappingProxyType` object wrapping the original + dictionary. (Contributed by Dennis Sweeney in :issue:`40890`.) 
+ Other Language Changes ====================== diff --git a/Lib/test/test_dict.py b/Lib/test/test_dict.py index 5c08810f879b1..9ff8b7d501aad 100644 --- a/Lib/test/test_dict.py +++ b/Lib/test/test_dict.py @@ -105,6 +105,26 @@ def test_items(self): self.assertRaises(TypeError, d.items, None) self.assertEqual(repr(dict(a=1).items()), "dict_items([('a', 1)])") + def test_views_mapping(self): + mappingproxy = type(type.__dict__) + class Dict(dict): + pass + for cls in [dict, Dict]: + d = cls() + m1 = d.keys().mapping + m2 = d.values().mapping + m3 = d.items().mapping + + for m in [m1, m2, m3]: + self.assertIsInstance(m, mappingproxy) + self.assertEqual(m, d) + + d["foo"] = "bar" + + for m in [m1, m2, m3]: + self.assertIsInstance(m, mappingproxy) + self.assertEqual(m, d) + def test_contains(self): d = {} self.assertNotIn('a', d) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-09-00-20-13.bpo-40890.LoRV-g.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-09-00-20-13.bpo-40890.LoRV-g.rst new file mode 100644 index 0000000000000..eaefc894a13a5 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-09-00-20-13.bpo-40890.LoRV-g.rst @@ -0,0 +1 @@ +Each dictionary view now has a ``mapping`` attribute that provides a :class:`types.MappingProxyType` wrapping the original dictionary. Patch contributed by Dennis Sweeney. \ No newline at end of file diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 1bb8cfdab2b68..48e96a09a5f87 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -4122,6 +4122,23 @@ _PyDictView_New(PyObject *dict, PyTypeObject *type) return (PyObject *)dv; } +static PyObject * +dictview_mapping(PyObject *view) +{ + assert(view != NULL); + assert(PyDictKeys_Check(view) + || PyDictValues_Check(view) + || PyDictItems_Check(view)); + PyObject *mapping = (PyObject *)((_PyDictViewObject *)view)->dv_dict; + return PyDictProxy_New(mapping); +} + +static PyGetSetDef dictview_getset[] = { + {"mapping", (getter)dictview_mapping, (setter)NULL, + "dictionary that this view refers to", NULL}, + {0} +}; + /* TODO(guido): The views objects are not complete: * support more set operations @@ -4635,7 +4652,7 @@ PyTypeObject PyDictKeys_Type = { (getiterfunc)dictkeys_iter, /* tp_iter */ 0, /* tp_iternext */ dictkeys_methods, /* tp_methods */ - 0, + .tp_getset = dictview_getset, }; static PyObject * @@ -4741,7 +4758,7 @@ PyTypeObject PyDictItems_Type = { (getiterfunc)dictitems_iter, /* tp_iter */ 0, /* tp_iternext */ dictitems_methods, /* tp_methods */ - 0, + .tp_getset = dictview_getset, }; static PyObject * @@ -4822,7 +4839,7 @@ PyTypeObject PyDictValues_Type = { (getiterfunc)dictvalues_iter, /* tp_iter */ 0, /* tp_iternext */ dictvalues_methods, /* tp_methods */ - 0, + .tp_getset = dictview_getset, }; static PyObject * From webhook-mailer at python.org Fri Jun 12 15:09:35 2020 From: webhook-mailer at python.org (Ned Deily) Date: Fri, 12 Jun 2020 19:09:35 -0000 Subject: [Python-checkins] bpo-40964: disable remote IMAP tests (GH-20836) Message-ID: https://github.com/python/cpython/commit/adce133378be75bcac4d61fd62a151852555000f commit: adce133378be75bcac4d61fd62a151852555000f branch: 3.8 author: Christian Heimes committer: Ned Deily date: 2020-06-12T15:08:32-04:00 summary: bpo-40964: disable remote IMAP tests (GH-20836) Remote host cyrus.andrew.cmu.edu is blocking incoming connections and is causing test suite to fail. 
Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst M Lib/test/test_imaplib.py diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index 03cffbe39c6c7..06ea86b59c482 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -911,6 +911,7 @@ def test_ssl_verified(self): @unittest.skipUnless( support.is_resource_enabled('network'), 'network resource disabled') + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAPTest(unittest.TestCase): host = 'cyrus.andrew.cmu.edu' port = 143 @@ -946,6 +947,7 @@ def test_logout(self): @unittest.skipUnless(ssl, "SSL not available") @unittest.skipUnless( support.is_resource_enabled('network'), 'network resource disabled') + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAP_STARTTLSTest(RemoteIMAPTest): def setUp(self): @@ -961,6 +963,7 @@ def test_logincapa(self): @unittest.skipUnless(ssl, "SSL not available") + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAP_SSLTest(RemoteIMAPTest): port = 993 imap_class = IMAP4_SSL diff --git a/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst b/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst new file mode 100644 index 0000000000000..abfe4f0da4351 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst @@ -0,0 +1,2 @@ +Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu is blocking +incoming connections. From webhook-mailer at python.org Fri Jun 12 15:11:56 2020 From: webhook-mailer at python.org (Ned Deily) Date: Fri, 12 Jun 2020 19:11:56 -0000 Subject: [Python-checkins] bpo-40964: disable remote IMAP tests (GH-20836) Message-ID: https://github.com/python/cpython/commit/0abb70ec93bfc6724199b7dd6c89294ce7d83072 commit: 0abb70ec93bfc6724199b7dd6c89294ce7d83072 branch: 3.7 author: Christian Heimes committer: Ned Deily date: 2020-06-12T15:10:43-04:00 summary: bpo-40964: disable remote IMAP tests (GH-20836) Remote host cyrus.andrew.cmu.edu is blocking incoming connections and is causing test suite to fail. 
Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst M Lib/test/test_imaplib.py diff --git a/Lib/test/test_imaplib.py b/Lib/test/test_imaplib.py index 9305e47ee9931..300a6d7b652b2 100644 --- a/Lib/test/test_imaplib.py +++ b/Lib/test/test_imaplib.py @@ -908,6 +908,7 @@ def test_ssl_verified(self): @unittest.skipUnless( support.is_resource_enabled('network'), 'network resource disabled') + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAPTest(unittest.TestCase): host = 'cyrus.andrew.cmu.edu' port = 143 @@ -943,6 +944,7 @@ def test_logout(self): @unittest.skipUnless(ssl, "SSL not available") @unittest.skipUnless( support.is_resource_enabled('network'), 'network resource disabled') + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAP_STARTTLSTest(RemoteIMAPTest): def setUp(self): @@ -958,6 +960,7 @@ def test_logincapa(self): @unittest.skipUnless(ssl, "SSL not available") + at unittest.skip('cyrus.andrew.cmu.edu blocks connections') class RemoteIMAP_SSLTest(RemoteIMAPTest): port = 993 imap_class = IMAP4_SSL diff --git a/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst b/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst new file mode 100644 index 0000000000000..abfe4f0da4351 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst @@ -0,0 +1,2 @@ +Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu is blocking +incoming connections. From webhook-mailer at python.org Fri Jun 12 16:46:46 2020 From: webhook-mailer at python.org (Srinivas Reddy Thatiparthy =?utf-8?q??= =?utf-8?b?KOCwtuCxjeCwsOCxgOCwqOCwv+CwteCwvuCwuOCxjSAg4LCw4LGG4LCh?= =?utf-8?b?4LGN4LCh4LC/IOCwpOCwvuCwn+Cwv+CwquCwsOCxjeCwpOCwvyk=?=) Date: Fri, 12 Jun 2020 20:46:46 -0000 Subject: [Python-checkins] bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) Message-ID: https://github.com/python/cpython/commit/80d827c3cb041ae72b9b0572981c50bdd1fe2cab commit: 80d827c3cb041ae72b9b0572981c50bdd1fe2cab branch: master author: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) 
committer: GitHub date: 2020-06-12T21:46:36+01:00 summary: bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) files: A Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst M .azure-pipelines/ci.yml M .azure-pipelines/pr.yml M PCbuild/get_externals.bat M PCbuild/python.props diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml index 9b638ddd00460..50dc50a654934 100644 --- a/.azure-pipelines/ci.yml +++ b/.azure-pipelines/ci.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(build.sourceBranchName)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml index 65f23eb62ee09..228f9db4f8ef2 100644 --- a/.azure-pipelines/pr.yml +++ b/.azure-pipelines/pr.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(system.pullRequest.TargetBranch)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst new file mode 100644 index 0000000000000..6390de717d71f --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst @@ -0,0 +1 @@ +Updates Windows OpenSSL to 1.1.1g \ No newline at end of file diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index fa27cdf369d9a..4171fd740c708 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -53,7 +53,7 @@ echo.Fetching external libraries... set libraries= set libraries=%libraries% bzip2-1.0.6 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1f +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1g set libraries=%libraries% sqlite-3.31.1.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.9.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.9.0 @@ -77,7 +77,7 @@ echo.Fetching external binaries... 
set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1f +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1g if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff --git a/PCbuild/python.props b/PCbuild/python.props index d1d16d61be869..6388d1b642675 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -62,8 +62,8 @@ $(ExternalsDir)libffi\ $(ExternalsDir)libffi\$(ArchName)\ $(libffiOutDir)include - $(ExternalsDir)openssl-1.1.1f\ - $(ExternalsDir)openssl-bin-1.1.1f\$(ArchName)\ + $(ExternalsDir)openssl-1.1.1g\ + $(ExternalsDir)openssl-bin-1.1.1g\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.2.11\ From webhook-mailer at python.org Fri Jun 12 17:15:01 2020 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Jun 2020 21:15:01 -0000 Subject: [Python-checkins] bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) Message-ID: https://github.com/python/cpython/commit/7e57c367d65f3d0219978b465dc00da15ae3724c commit: 7e57c367d65f3d0219978b465dc00da15ae3724c branch: 3.8 author: Steve Dower committer: GitHub date: 2020-06-12T22:14:53+01:00 summary: bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) Co-authored-by: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) files: A Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst M .azure-pipelines/ci.yml M .azure-pipelines/pr.yml M PCbuild/get_externals.bat M PCbuild/python.props diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml index 9b638ddd00460..50dc50a654934 100644 --- a/.azure-pipelines/ci.yml +++ b/.azure-pipelines/ci.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(build.sourceBranchName)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml index 65f23eb62ee09..228f9db4f8ef2 100644 --- a/.azure-pipelines/pr.yml +++ b/.azure-pipelines/pr.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(system.pullRequest.TargetBranch)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst new file mode 100644 index 0000000000000..6390de717d71f --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst @@ -0,0 +1 @@ +Updates Windows OpenSSL to 1.1.1g \ No newline at end of file diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index dca0b4587793b..3ad7ce71ffba1 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -53,7 +53,7 @@ echo.Fetching external libraries... 
set libraries= set libraries=%libraries% bzip2-1.0.6 if NOT "%IncludeLibffiSrc%"=="false" set libraries=%libraries% libffi-3.3.0-rc0-r1 -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1f +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1g set libraries=%libraries% sqlite-3.31.1.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.9.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.9.0 @@ -77,7 +77,7 @@ echo.Fetching external binaries... set binaries= if NOT "%IncludeLibffi%"=="false" set binaries=%binaries% libffi -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1f +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1g if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff --git a/PCbuild/python.props b/PCbuild/python.props index d1d16d61be869..6388d1b642675 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -62,8 +62,8 @@ $(ExternalsDir)libffi\ $(ExternalsDir)libffi\$(ArchName)\ $(libffiOutDir)include - $(ExternalsDir)openssl-1.1.1f\ - $(ExternalsDir)openssl-bin-1.1.1f\$(ArchName)\ + $(ExternalsDir)openssl-1.1.1g\ + $(ExternalsDir)openssl-bin-1.1.1g\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.2.11\ From webhook-mailer at python.org Fri Jun 12 17:15:32 2020 From: webhook-mailer at python.org (Steve Dower) Date: Fri, 12 Jun 2020 21:15:32 -0000 Subject: [Python-checkins] bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) Message-ID: https://github.com/python/cpython/commit/617af99312ca36ad5a08db764858caf11c92a2c0 commit: 617af99312ca36ad5a08db764858caf11c92a2c0 branch: 3.7 author: Steve Dower committer: GitHub date: 2020-06-12T22:15:27+01:00 summary: bpo-40164: Update Windows OpenSSL to 1.1.1g (GH-20834) Co-authored-by: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) 
files: A Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst M .azure-pipelines/ci.yml M .azure-pipelines/pr.yml M PCbuild/get_externals.bat M PCbuild/python.props diff --git a/.azure-pipelines/ci.yml b/.azure-pipelines/ci.yml index 0a3e27b133cae..b9038b982f7dd 100644 --- a/.azure-pipelines/ci.yml +++ b/.azure-pipelines/ci.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(build.sourceBranchName)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/.azure-pipelines/pr.yml b/.azure-pipelines/pr.yml index 5cf4dd7de1fc4..808b5f1c75bea 100644 --- a/.azure-pipelines/pr.yml +++ b/.azure-pipelines/pr.yml @@ -61,7 +61,7 @@ jobs: variables: testRunTitle: '$(system.pullRequest.TargetBranch)-linux' testRunPlatform: linux - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml @@ -118,7 +118,7 @@ jobs: variables: testRunTitle: '$(Build.SourceBranchName)-linux-coverage' testRunPlatform: linux-coverage - openssl_version: 1.1.1f + openssl_version: 1.1.1g steps: - template: ./posix-steps.yml diff --git a/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst new file mode 100644 index 0000000000000..6390de717d71f --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst @@ -0,0 +1 @@ +Updates Windows OpenSSL to 1.1.1g \ No newline at end of file diff --git a/PCbuild/get_externals.bat b/PCbuild/get_externals.bat index f0e58e64dda53..38fc2756b18d0 100644 --- a/PCbuild/get_externals.bat +++ b/PCbuild/get_externals.bat @@ -49,7 +49,7 @@ echo.Fetching external libraries... set libraries= set libraries=%libraries% bzip2-1.0.6 -if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1f +if NOT "%IncludeSSLSrc%"=="false" set libraries=%libraries% openssl-1.1.1g set libraries=%libraries% sqlite-3.31.1.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tcl-core-8.6.9.0 if NOT "%IncludeTkinterSrc%"=="false" set libraries=%libraries% tk-8.6.9.0 @@ -72,7 +72,7 @@ for %%e in (%libraries%) do ( echo.Fetching external binaries... set binaries= -if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1f +if NOT "%IncludeSSL%"=="false" set binaries=%binaries% openssl-bin-1.1.1g if NOT "%IncludeTkinter%"=="false" set binaries=%binaries% tcltk-8.6.9.0 if NOT "%IncludeSSLSrc%"=="false" set binaries=%binaries% nasm-2.11.06 diff --git a/PCbuild/python.props b/PCbuild/python.props index b68191ba754d0..8be1daa696323 100644 --- a/PCbuild/python.props +++ b/PCbuild/python.props @@ -49,8 +49,8 @@ $(ExternalsDir)sqlite-3.31.1.0\ $(ExternalsDir)bzip2-1.0.6\ $(ExternalsDir)xz-5.2.2\ - $(ExternalsDir)openssl-1.1.1f\ - $(ExternalsDir)openssl-bin-1.1.1f\$(ArchName)\ + $(ExternalsDir)openssl-1.1.1g\ + $(ExternalsDir)openssl-bin-1.1.1g\$(ArchName)\ $(opensslOutDir)include $(ExternalsDir)\nasm-2.11.06\ $(ExternalsDir)\zlib-1.2.11\ From webhook-mailer at python.org Fri Jun 12 18:50:27 2020 From: webhook-mailer at python.org (Antoine) Date: Fri, 12 Jun 2020 22:50:27 -0000 Subject: [Python-checkins] Minor change on threading.Thread.native_id documentation. 
(GH-18129) Message-ID: https://github.com/python/cpython/commit/a6ac2391627aa3760ed9e8cf4f7e6e82878e5035 commit: a6ac2391627aa3760ed9e8cf4f7e6e82878e5035 branch: master author: Antoine <43954001+awecx at users.noreply.github.com> committer: GitHub date: 2020-06-12T15:50:18-07:00 summary: Minor change on threading.Thread.native_id documentation. (GH-18129) Remove duplication in `threading.Thread.native_id` documentation, so resulting documentation is more consistent with the `threading.Thread.ident`. Issue initially raised [here](https://github.com/python/python-docs-fr/pull/1122#discussion_r369236634) (in French). No issue associated to this PR. Automerge-Triggered-By: @csabella files: M Doc/library/threading.rst diff --git a/Doc/library/threading.rst b/Doc/library/threading.rst index 3a446adfac8c5..458e39bf721c6 100644 --- a/Doc/library/threading.rst +++ b/Doc/library/threading.rst @@ -349,13 +349,12 @@ since it is impossible to detect the termination of alien threads. .. attribute:: native_id - The native integral thread ID of this thread. + The Thread ID (``TID``) of this thread, as assigned by the OS (kernel). This is a non-negative integer, or ``None`` if the thread has not been started. See the :func:`get_native_id` function. - This represents the Thread ID (``TID``) as assigned to the - thread by the OS (kernel). Its value may be used to uniquely identify - this particular thread system-wide (until the thread terminates, - after which the value may be recycled by the OS). + This value may be used to uniquely identify this particular thread + system-wide (until the thread terminates, after which the value + may be recycled by the OS). .. note:: From webhook-mailer at python.org Fri Jun 12 19:19:42 2020 From: webhook-mailer at python.org (Steve (Gadget) Barnes) Date: Fri, 12 Jun 2020 23:19:42 -0000 Subject: [Python-checkins] bpo-37556 Extend help to include latest overrides (GH-14701) Message-ID: https://github.com/python/cpython/commit/b3e6783423f58597419abae343dd1d5dcc02a7e3 commit: b3e6783423f58597419abae343dd1d5dcc02a7e3 branch: master author: Steve (Gadget) Barnes committer: GitHub date: 2020-06-12T16:19:34-07:00 summary: bpo-37556 Extend help to include latest overrides (GH-14701) Modify the help in cpython/PC/launcher.c to show users that "latest" can be overridden by shebang, PY_PYTHON[n] or py.ini files. Also show that script [args] is optional by enclosing in square brackets. Automerge-Triggered-By: @zooba files: A Misc/NEWS.d/next/Windows/2019-07-11-06-11-09.bpo-37556.sygMUU.rst M PC/launcher.c diff --git a/Misc/NEWS.d/next/Windows/2019-07-11-06-11-09.bpo-37556.sygMUU.rst b/Misc/NEWS.d/next/Windows/2019-07-11-06-11-09.bpo-37556.sygMUU.rst new file mode 100644 index 0000000000000..e8af96421b845 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2019-07-11-06-11-09.bpo-37556.sygMUU.rst @@ -0,0 +1 @@ +Extend py.exe help to mention overrides via venv, shebang, environmental variables & ini files. 
\ No newline at end of file diff --git a/PC/launcher.c b/PC/launcher.c index fd5ad0ab1a1d9..106fc660755f7 100644 --- a/PC/launcher.c +++ b/PC/launcher.c @@ -1520,7 +1520,7 @@ show_help_text(wchar_t ** argv) Python Launcher for Windows Version %ls\n\n", version_text); fwprintf(stdout, L"\ usage:\n\ -%ls [launcher-args] [python-args] script [script-args]\n\n", argv[0]); +%ls [launcher-args] [python-args] [script [script-args]]\n\n", argv[0]); fputws(L"\ Launcher arguments:\n\n\ -2 : Launch the latest Python 2.x version\n\ @@ -1536,6 +1536,15 @@ Launcher arguments:\n\n\ } fputws(L"\n-0 --list : List the available pythons", stdout); fputws(L"\n-0p --list-paths : List with paths", stdout); + fputws(L"\n\n If no script is specified the specified interpreter is opened.", stdout); + fputws(L"\nIf an exact version is not given, using the latest version can be overridden by", stdout); + fputws(L"\nany of the following, (in priority order):", stdout); + fputws(L"\n An active virtual environment", stdout); + fputws(L"\n A shebang line in the script (if present)", stdout); + fputws(L"\n With -2 or -3 flag a matching PY_PYTHON2 or PY_PYTHON3 Enviroment variable", stdout); + fputws(L"\n A PY_PYTHON Enviroment variable", stdout); + fputws(L"\n From [defaults] in py.ini in your %LOCALAPPDATA%\\py.ini", stdout); + fputws(L"\n From [defaults] in py.ini beside py.exe (use `where py` to locate)", stdout); fputws(L"\n\nThe following help text is from Python:\n\n", stdout); fflush(stdout); } From webhook-mailer at python.org Sat Jun 13 02:58:24 2020 From: webhook-mailer at python.org (Hai Shi) Date: Sat, 13 Jun 2020 06:58:24 -0000 Subject: [Python-checkins] Remove redundant var in PyErr_NewException() (GH-20850) Message-ID: https://github.com/python/cpython/commit/1c209e3b53b6929747fe3b79398bfbaeabb72d92 commit: 1c209e3b53b6929747fe3b79398bfbaeabb72d92 branch: master author: Hai Shi committer: GitHub date: 2020-06-13T15:58:12+09:00 summary: Remove redundant var in PyErr_NewException() (GH-20850) files: M Python/errors.c diff --git a/Python/errors.c b/Python/errors.c index 5d1725679c4bd..cc00ae4a5407b 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -1079,7 +1079,6 @@ PyErr_NewException(const char *name, PyObject *base, PyObject *dict) { PyThreadState *tstate = _PyThreadState_GET(); PyObject *modulename = NULL; - PyObject *classname = NULL; PyObject *mydict = NULL; PyObject *bases = NULL; PyObject *result = NULL; @@ -1125,7 +1124,6 @@ PyErr_NewException(const char *name, PyObject *base, PyObject *dict) failure: Py_XDECREF(bases); Py_XDECREF(mydict); - Py_XDECREF(classname); Py_XDECREF(modulename); return result; } From webhook-mailer at python.org Sat Jun 13 08:26:28 2020 From: webhook-mailer at python.org (An Long) Date: Sat, 13 Jun 2020 12:26:28 -0000 Subject: [Python-checkins] bpo-40834: Fix truncate when sending str object with channel (GH-20555) Message-ID: https://github.com/python/cpython/commit/29c117202e386bad1d66ae336e2fefa1a1809ee0 commit: 29c117202e386bad1d66ae336e2fefa1a1809ee0 branch: master author: An Long committer: GitHub date: 2020-06-13T05:26:01-07:00 summary: bpo-40834: Fix truncate when sending str object with channel (GH-20555) files: A Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst M Lib/test/test__xxsubinterpreters.py M Python/pystate.c diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index e17bfde2c2f75..7aec021fb19a5 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -378,6 +378,9 @@ 
def test_bytes(self): self._assert_values(i.to_bytes(2, 'little', signed=True) for i in range(-1, 258)) + def test_strs(self): + self._assert_values(['hello world', '????', '']) + def test_int(self): self._assert_values(itertools.chain(range(-1, 258), [sys.maxsize, -sys.maxsize - 1])) diff --git a/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst b/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst new file mode 100644 index 0000000000000..272783773ff94 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst @@ -0,0 +1 @@ +Fix truncate when sending str object with_xxsubinterpreters.channel_send. \ No newline at end of file diff --git a/Python/pystate.c b/Python/pystate.c index 72d8b36342517..d0cbf5cb8364b 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1726,7 +1726,7 @@ _str_shared(PyObject *obj, _PyCrossInterpreterData *data) struct _shared_str_data *shared = PyMem_NEW(struct _shared_str_data, 1); shared->kind = PyUnicode_KIND(obj); shared->buffer = PyUnicode_DATA(obj); - shared->len = PyUnicode_GET_LENGTH(obj) - 1; + shared->len = PyUnicode_GET_LENGTH(obj); data->data = (void *)shared; Py_INCREF(obj); data->obj = obj; // Will be "released" (decref'ed) when data released. From webhook-mailer at python.org Sat Jun 13 08:44:55 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Jun 2020 12:44:55 -0000 Subject: [Python-checkins] bpo-40834: Fix truncate when sending str object with channel (GH-20555) Message-ID: https://github.com/python/cpython/commit/26db10a431bf5b55340f4427bf015719e384d306 commit: 26db10a431bf5b55340f4427bf015719e384d306 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-13T05:44:50-07:00 summary: bpo-40834: Fix truncate when sending str object with channel (GH-20555) (cherry picked from commit 29c117202e386bad1d66ae336e2fefa1a1809ee0) Co-authored-by: An Long files: A Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst M Lib/test/test__xxsubinterpreters.py M Python/pystate.c diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 30f8f98acc9dd..f14868ad8cb35 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -392,6 +392,9 @@ def test_bytes(self): self._assert_values(i.to_bytes(2, 'little', signed=True) for i in range(-1, 258)) + def test_strs(self): + self._assert_values(['hello world', '????', '']) + def test_int(self): self._assert_values(itertools.chain(range(-1, 258), [sys.maxsize, -sys.maxsize - 1])) diff --git a/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst b/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst new file mode 100644 index 0000000000000..272783773ff94 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst @@ -0,0 +1 @@ +Fix truncate when sending str object with_xxsubinterpreters.channel_send. 
\ No newline at end of file diff --git a/Python/pystate.c b/Python/pystate.c index 3e1085568b61a..b1d0f1cbec428 100644 --- a/Python/pystate.c +++ b/Python/pystate.c @@ -1639,7 +1639,7 @@ _str_shared(PyObject *obj, _PyCrossInterpreterData *data) struct _shared_str_data *shared = PyMem_NEW(struct _shared_str_data, 1); shared->kind = PyUnicode_KIND(obj); shared->buffer = PyUnicode_DATA(obj); - shared->len = PyUnicode_GET_LENGTH(obj) - 1; + shared->len = PyUnicode_GET_LENGTH(obj); data->data = (void *)shared; Py_INCREF(obj); data->obj = obj; // Will be "released" (decref'ed) when data released. From webhook-mailer at python.org Sat Jun 13 11:57:30 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sat, 13 Jun 2020 15:57:30 -0000 Subject: [Python-checkins] bpo-40957: Fix refleak in _Py_fopen_obj() (GH-20827) Message-ID: https://github.com/python/cpython/commit/9672912e8f90374fd31b37ca0fb7cefbc6f4c555 commit: 9672912e8f90374fd31b37ca0fb7cefbc6f4c555 branch: master author: Christian Heimes committer: GitHub date: 2020-06-14T00:57:22+09:00 summary: bpo-40957: Fix refleak in _Py_fopen_obj() (GH-20827) Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst M Python/fileutils.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst new file mode 100644 index 0000000000000..f99c374f94aac --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst @@ -0,0 +1 @@ +Fix refleak in _Py_fopen_obj() when PySys_Audit() fails diff --git a/Python/fileutils.c b/Python/fileutils.c index 439bc351596f7..22e72bdd5a9c7 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1461,6 +1461,7 @@ _Py_fopen_obj(PyObject *path, const char *mode) path_bytes = PyBytes_AS_STRING(bytes); if (PySys_Audit("open", "Osi", path, mode, 0) < 0) { + Py_DECREF(bytes); return NULL; } From webhook-mailer at python.org Sat Jun 13 12:15:13 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Jun 2020 16:15:13 -0000 Subject: [Python-checkins] bpo-40957: Fix refleak in _Py_fopen_obj() (GH-20827) Message-ID: https://github.com/python/cpython/commit/a8936fa5c09c039ad457dda8bdf733be6cc182e3 commit: a8936fa5c09c039ad457dda8bdf733be6cc182e3 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-13T09:15:05-07:00 summary: bpo-40957: Fix refleak in _Py_fopen_obj() (GH-20827) Signed-off-by: Christian Heimes (cherry picked from commit 9672912e8f90374fd31b37ca0fb7cefbc6f4c555) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst M Python/fileutils.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst new file mode 100644 index 0000000000000..f99c374f94aac --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst @@ -0,0 +1 @@ +Fix refleak in _Py_fopen_obj() when PySys_Audit() fails diff --git a/Python/fileutils.c b/Python/fileutils.c index e79e732d1f55b..1021ddb58853e 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1461,6 +1461,7 @@ _Py_fopen_obj(PyObject *path, const char *mode) path_bytes = PyBytes_AS_STRING(bytes); if (PySys_Audit("open", "Osi", path, mode, 0) < 0) { + Py_DECREF(bytes); return NULL; } 
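
The one-line fix above illustrates a general CPython reference-counting rule: once a function owns a strong reference (here the bytes object returned by PyUnicode_EncodeFSDefault()), every early-return path has to release it, including branches added later such as the PySys_Audit() failure check. A minimal sketch of the corrected shape, using a hypothetical open_from_path() helper rather than the real _Py_fopen_obj() (which also handles EINTR retries and richer error reporting):

```c
#include <Python.h>
#include <stdio.h>

/* Hypothetical helper mirroring only the reference-counting shape of the
   fixed _Py_fopen_obj(): the audit-hook failure path must drop the
   reference it owns before returning. */
static FILE *
open_from_path(PyObject *path, const char *mode)
{
    PyObject *bytes = PyUnicode_EncodeFSDefault(path);  /* new (owned) reference */
    if (bytes == NULL) {
        return NULL;                  /* nothing owned yet, plain return is fine */
    }
    if (PySys_Audit("open", "Osi", path, mode, 0) < 0) {
        Py_DECREF(bytes);             /* without this, the early return leaks `bytes` */
        return NULL;
    }
    FILE *fp = fopen(PyBytes_AS_STRING(bytes), mode);
    Py_DECREF(bytes);                 /* the normal path releases the reference too */
    return fp;
}
```
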
From webhook-mailer at python.org Sat Jun 13 12:46:52 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 13 Jun 2020 16:46:52 -0000 Subject: [Python-checkins] Minor code clean-ups (GH-20838) Message-ID: https://github.com/python/cpython/commit/9db5b8d44858d134b0b225df481b784d7511dbd4 commit: 9db5b8d44858d134b0b225df481b784d7511dbd4 branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-13T09:46:47-07:00 summary: Minor code clean-ups (GH-20838) files: M Lib/random.py diff --git a/Lib/random.py b/Lib/random.py index 75f70d5d699ed..02a56c6935b89 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -39,7 +39,7 @@ from warnings import warn as _warn from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil -from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin +from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin, tau as TWOPI from os import urandom as _urandom from _collections_abc import Set as _Set, Sequence as _Sequence from itertools import accumulate as _accumulate, repeat as _repeat @@ -54,19 +54,38 @@ from hashlib import sha512 as _sha512 -__all__ = ["Random","seed","random","uniform","randint","choice","sample", - "randrange","shuffle","normalvariate","lognormvariate", - "expovariate","vonmisesvariate","gammavariate","triangular", - "gauss","betavariate","paretovariate","weibullvariate", - "getstate","setstate", "getrandbits", "choices", - "SystemRandom"] - -NV_MAGICCONST = 4 * _exp(-0.5)/_sqrt(2.0) -TWOPI = 2.0*_pi +__all__ = [ + "Random", + "SystemRandom", + "betavariate", + "choice", + "choices", + "expovariate", + "gammavariate", + "gauss", + "getrandbits", + "getstate", + "lognormvariate", + "normalvariate", + "paretovariate", + "randint", + "random", + "randrange", + "sample", + "seed", + "setstate", + "shuffle", + "triangular", + "uniform", + "vonmisesvariate", + "weibullvariate", +] + +NV_MAGICCONST = 4 * _exp(-0.5) / _sqrt(2.0) LOG4 = _log(4.0) SG_MAGICCONST = 1.0 + _log(4.5) BPF = 53 # Number of bits in a float -RECIP_BPF = 2**-BPF +RECIP_BPF = 2 ** -BPF # Translated by Guido van Rossum from C source provided by @@ -75,6 +94,7 @@ import _random + class Random(_random.Random): """Random number generator base class used by bound module functions. @@ -180,7 +200,7 @@ def setstate(self, state): # really unsigned 32-bit ints, so we convert negative ints from # version 2 to positive longs for version 3. try: - internalstate = tuple(x % (2**32) for x in internalstate) + internalstate = tuple(x % (2 ** 32) for x in internalstate) except ValueError as e: raise TypeError from e super().setstate(internalstate) @@ -189,21 +209,21 @@ def setstate(self, state): "Random.setstate() of version %s" % (version, self.VERSION)) -## ---- Methods below this point do not need to be overridden when -## ---- subclassing for the purpose of using a different core generator. + ## ---- Methods below this point do not need to be overridden when + ## ---- subclassing for the purpose of using a different core generator. 
-## -------------------- bytes methods --------------------- + ## -------------------- bytes methods --------------------- def randbytes(self, n): """Generate n random bytes.""" return self.getrandbits(n * 8).to_bytes(n, 'little') -## -------------------- pickle support ------------------- + ## -------------------- pickle support ------------------- # Issue 17489: Since __reduce__ was defined to fix #759889 this is no # longer called; we leave it here because it has been here since random was # rewritten back in 2001 and why risk breaking something. - def __getstate__(self): # for pickle + def __getstate__(self): # for pickle return self.getstate() def __setstate__(self, state): # for pickle @@ -212,7 +232,7 @@ def __setstate__(self, state): # for pickle def __reduce__(self): return self.__class__, (), self.getstate() -## -------------------- integer methods ------------------- + ## -------------------- integer methods ------------------- def randrange(self, start, stop=None, step=1, _int=int): """Choose a random item from range(start, stop[, step]). @@ -256,7 +276,7 @@ def randrange(self, start, stop=None, step=1, _int=int): if n <= 0: raise ValueError("empty range for randrange()") - return istart + istep*self._randbelow(n) + return istart + istep * self._randbelow(n) def randint(self, a, b): """Return random integer in range [a, b], including both end points. @@ -271,7 +291,7 @@ def _randbelow_with_getrandbits(self, n): return 0 getrandbits = self.getrandbits k = n.bit_length() # don't use (n-1) here because n can be 1 - r = getrandbits(k) # 0 <= r < 2**k + r = getrandbits(k) # 0 <= r < 2**k while r >= n: r = getrandbits(k) return r @@ -295,15 +315,16 @@ def _randbelow_without_getrandbits(self, n, int=int, maxsize=1<= limit: r = random() - return int(r*maxsize) % n + return int(r * maxsize) % n _randbelow = _randbelow_with_getrandbits -## -------------------- sequence methods ------------------- + ## -------------------- sequence methods ------------------- def choice(self, seq): """Choose a random element from a non-empty sequence.""" - return seq[self._randbelow(len(seq))] # raises IndexError if seq is empty + # raises IndexError if seq is empty + return seq[self._randbelow(len(seq))] def shuffle(self, x, random=None): """Shuffle list x in place, and return None. @@ -318,7 +339,7 @@ def shuffle(self, x, random=None): randbelow = self._randbelow for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] - j = randbelow(i+1) + j = randbelow(i + 1) x[i], x[j] = x[j], x[i] else: _warn('The *random* parameter to shuffle() has been deprecated\n' @@ -328,7 +349,7 @@ def shuffle(self, x, random=None): _int = int for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] - j = _int(random() * (i+1)) + j = _int(random() * (i + 1)) x[i], x[j] = x[j], x[i] def sample(self, population, k, *, counts=None): @@ -410,14 +431,15 @@ def sample(self, population, k, *, counts=None): result = [None] * k setsize = 21 # size of a small set minus size of an empty list if k > 5: - setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets + setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets if n <= setsize: - # An n-length list is smaller than a k-length set + # An n-length list is smaller than a k-length set. 
+ # Invariant: non-selected at pool[0 : n-i] pool = list(population) - for i in range(k): # invariant: non-selected at [0,n-i) - j = randbelow(n-i) + for i in range(k): + j = randbelow(n - i) result[i] = pool[j] - pool[j] = pool[n-i-1] # move non-selected item into vacancy + pool[j] = pool[n - i - 1] # move non-selected item into vacancy else: selected = set() selected_add = selected.add @@ -456,15 +478,15 @@ def choices(self, population, weights=None, *, cum_weights=None, k=1): return [population[bisect(cum_weights, random() * total, 0, hi)] for i in _repeat(None, k)] -## -------------------- real-valued distributions ------------------- + ## -------------------- real-valued distributions ------------------- -## -------------------- uniform distribution ------------------- + ## -------------------- uniform distribution ------------------- def uniform(self, a, b): "Get a random number in the range [a, b) or [a, b] depending on rounding." - return a + (b-a) * self.random() + return a + (b - a) * self.random() -## -------------------- triangular -------------------- + ## -------------------- triangular -------------------- def triangular(self, low=0.0, high=1.0, mode=None): """Triangular distribution. @@ -486,7 +508,7 @@ def triangular(self, low=0.0, high=1.0, mode=None): low, high = high, low return low + (high - low) * _sqrt(u * c) -## -------------------- normal distribution -------------------- + ## -------------------- normal distribution -------------------- def normalvariate(self, mu, sigma): """Normal distribution. @@ -502,16 +524,16 @@ def normalvariate(self, mu, sigma): # Math Software, 3, (1977), pp257-260. random = self.random - while 1: + while True: u1 = random() u2 = 1.0 - random() - z = NV_MAGICCONST*(u1-0.5)/u2 - zz = z*z/4.0 + z = NV_MAGICCONST * (u1 - 0.5) / u2 + zz = z * z / 4.0 if zz <= -_log(u2): break - return mu + z*sigma + return mu + z * sigma -## -------------------- lognormal distribution -------------------- + ## -------------------- lognormal distribution -------------------- def lognormvariate(self, mu, sigma): """Log normal distribution. @@ -523,7 +545,7 @@ def lognormvariate(self, mu, sigma): """ return _exp(self.normalvariate(mu, sigma)) -## -------------------- exponential distribution -------------------- + ## -------------------- exponential distribution -------------------- def expovariate(self, lambd): """Exponential distribution. @@ -540,9 +562,9 @@ def expovariate(self, lambd): # we use 1-random() instead of random() to preclude the # possibility of taking the log of zero. - return -_log(1.0 - self.random())/lambd + return -_log(1.0 - self.random()) / lambd -## -------------------- von Mises distribution -------------------- + ## -------------------- von Mises distribution -------------------- def vonmisesvariate(self, mu, kappa): """Circular data distribution. @@ -571,7 +593,7 @@ def vonmisesvariate(self, mu, kappa): s = 0.5 / kappa r = s + _sqrt(1.0 + s * s) - while 1: + while True: u1 = random() z = _cos(_pi * u1) @@ -590,7 +612,7 @@ def vonmisesvariate(self, mu, kappa): return theta -## -------------------- gamma distribution -------------------- + ## -------------------- gamma distribution -------------------- def gammavariate(self, alpha, beta): """Gamma distribution. Not the gamma function! 
@@ -625,32 +647,31 @@ def gammavariate(self, alpha, beta): while 1: u1 = random() - if not 1e-7 < u1 < .9999999: + if not 1e-7 < u1 < 0.9999999: continue u2 = 1.0 - random() - v = _log(u1/(1.0-u1))/ainv - x = alpha*_exp(v) - z = u1*u1*u2 - r = bbb+ccc*v-x - if r + SG_MAGICCONST - 4.5*z >= 0.0 or r >= _log(z): + v = _log(u1 / (1.0 - u1)) / ainv + x = alpha * _exp(v) + z = u1 * u1 * u2 + r = bbb + ccc * v - x + if r + SG_MAGICCONST - 4.5 * z >= 0.0 or r >= _log(z): return x * beta elif alpha == 1.0: # expovariate(1/beta) return -_log(1.0 - random()) * beta - else: # alpha is between 0 and 1 (exclusive) - + else: + # alpha is between 0 and 1 (exclusive) # Uses ALGORITHM GS of Statistical Computing - Kennedy & Gentle - - while 1: + while True: u = random() - b = (_e + alpha)/_e - p = b*u + b = (_e + alpha) / _e + p = b * u if p <= 1.0: - x = p ** (1.0/alpha) + x = p ** (1.0 / alpha) else: - x = -_log((b-p)/alpha) + x = -_log((b - p) / alpha) u1 = random() if p > 1.0: if u1 <= x ** (alpha - 1.0): @@ -659,7 +680,7 @@ def gammavariate(self, alpha, beta): break return x * beta -## -------------------- Gauss (faster alternative) -------------------- + ## -------------------- Gauss (faster alternative) -------------------- def gauss(self, mu, sigma): """Gaussian distribution. @@ -698,21 +719,21 @@ def gauss(self, mu, sigma): z = _cos(x2pi) * g2rad self.gauss_next = _sin(x2pi) * g2rad - return mu + z*sigma - -## -------------------- beta -------------------- -## See -## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html -## for Ivan Frohne's insightful analysis of why the original implementation: -## -## def betavariate(self, alpha, beta): -## # Discrete Event Simulation in C, pp 87-88. -## -## y = self.expovariate(alpha) -## z = self.expovariate(1.0/beta) -## return z/(y+z) -## -## was dead wrong, and how it probably got that way. + return mu + z * sigma + + ## -------------------- beta -------------------- + ## See + ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html + ## for Ivan Frohne's insightful analysis of why the original implementation: + ## + ## def betavariate(self, alpha, beta): + ## # Discrete Event Simulation in C, pp 87-88. + ## + ## y = self.expovariate(alpha) + ## z = self.expovariate(1.0/beta) + ## return z/(y+z) + ## + ## was dead wrong, and how it probably got that way. def betavariate(self, alpha, beta): """Beta distribution. @@ -725,21 +746,20 @@ def betavariate(self, alpha, beta): # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). y = self.gammavariate(alpha, 1.0) - if y == 0: - return 0.0 - else: + if y: return y / (y + self.gammavariate(beta, 1.0)) + return 0.0 -## -------------------- Pareto -------------------- + ## -------------------- Pareto -------------------- def paretovariate(self, alpha): """Pareto distribution. alpha is the shape parameter.""" # Jain, pg. 495 u = 1.0 - self.random() - return 1.0 / u ** (1.0/alpha) + return 1.0 / u ** (1.0 / alpha) -## -------------------- Weibull -------------------- + ## -------------------- Weibull -------------------- def weibullvariate(self, alpha, beta): """Weibull distribution. @@ -750,7 +770,8 @@ def weibullvariate(self, alpha, beta): # Jain, pg. 
499; bug fix courtesy Bill Arms u = 1.0 - self.random() - return alpha * (-_log(u)) ** (1.0/beta) + return alpha * (-_log(u)) ** (1.0 / beta) + ## --------------- Operating System Random Source ------------------ @@ -789,6 +810,7 @@ def _notimplemented(self, *args, **kwds): raise NotImplementedError('System entropy source does not have state.') getstate = setstate = _notimplemented + ## -------------------- test program -------------------- def _test_generator(n, func, args): @@ -806,11 +828,10 @@ def _test_generator(n, func, args): smallest = min(x, smallest) largest = max(x, largest) t1 = time.perf_counter() - print(round(t1-t0, 3), 'sec,', end=' ') - avg = total/n - stddev = _sqrt(sqsum/n - avg*avg) - print('avg %g, stddev %g, min %g, max %g\n' % \ - (avg, stddev, smallest, largest)) + print(round(t1 - t0, 3), 'sec,', end=' ') + avg = total / n + stddev = _sqrt(sqsum / n - avg * avg) + print('avg %g, stddev %g, min %g, max %g\n' % (avg, stddev, smallest, largest)) def _test(N=2000): @@ -829,11 +850,11 @@ def _test(N=2000): _test_generator(N, gammavariate, (200.0, 1.0)) _test_generator(N, gauss, (0.0, 1.0)) _test_generator(N, betavariate, (3.0, 3.0)) - _test_generator(N, triangular, (0.0, 1.0, 1.0/3.0)) + _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0)) # Create one instance, seeded from current time, and export its methods # as module-level functions. The functions share state across all uses -#(both in the user's code and in the Python libraries), but that's fine +# (both in the user's code and in the Python libraries), but that's fine # for most programs and is easier for the casual user than making them # instantiate their own Random() instance. From webhook-mailer at python.org Sat Jun 13 13:35:16 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sat, 13 Jun 2020 17:35:16 -0000 Subject: [Python-checkins] bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) Message-ID: https://github.com/python/cpython/commit/dea3223740127ac13f984c1d38f127ab6701af44 commit: dea3223740127ac13f984c1d38f127ab6701af44 branch: master author: Zackery Spytz committer: GitHub date: 2020-06-13T10:35:08-07:00 summary: bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) files: M Doc/library/imp.rst diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst index f5ad8c7229644..121a730e0c9b4 100644 --- a/Doc/library/imp.rst +++ b/Doc/library/imp.rst @@ -8,7 +8,7 @@ **Source code:** :source:`Lib/imp.py` .. deprecated:: 3.4 - The :mod:`imp` package is pending deprecation in favor of :mod:`importlib`. + The :mod:`imp` module is deprecated in favor of :mod:`importlib`. .. 
index:: statement: import From webhook-mailer at python.org Sat Jun 13 16:56:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Jun 2020 20:56:00 -0000 Subject: [Python-checkins] [3.8] bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) (GH-20860) Message-ID: https://github.com/python/cpython/commit/f8c05bb3a6f25224d7767561ec6a36a737e17779 commit: f8c05bb3a6f25224d7767561ec6a36a737e17779 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-13T13:55:56-07:00 summary: [3.8] bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) (GH-20860) (cherry picked from commit dea3223740127ac13f984c1d38f127ab6701af44) Co-authored-by: Zackery Spytz Automerge-Triggered-By: @brettcannon files: M Doc/library/imp.rst diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst index f5ad8c7229644..121a730e0c9b4 100644 --- a/Doc/library/imp.rst +++ b/Doc/library/imp.rst @@ -8,7 +8,7 @@ **Source code:** :source:`Lib/imp.py` .. deprecated:: 3.4 - The :mod:`imp` package is pending deprecation in favor of :mod:`importlib`. + The :mod:`imp` module is deprecated in favor of :mod:`importlib`. .. index:: statement: import From webhook-mailer at python.org Sat Jun 13 16:56:15 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Jun 2020 20:56:15 -0000 Subject: [Python-checkins] [3.7] bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) (GH-20861) Message-ID: https://github.com/python/cpython/commit/e1ca0c530104bd1e9fa61c167f267e6ca58a798a commit: e1ca0c530104bd1e9fa61c167f267e6ca58a798a branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-13T13:56:10-07:00 summary: [3.7] bpo-37674: Tweak imp module deprecation note in the docs (GH-20480) (GH-20861) (cherry picked from commit dea3223740127ac13f984c1d38f127ab6701af44) Co-authored-by: Zackery Spytz Automerge-Triggered-By: @brettcannon files: M Doc/library/imp.rst diff --git a/Doc/library/imp.rst b/Doc/library/imp.rst index f5ad8c7229644..121a730e0c9b4 100644 --- a/Doc/library/imp.rst +++ b/Doc/library/imp.rst @@ -8,7 +8,7 @@ **Source code:** :source:`Lib/imp.py` .. deprecated:: 3.4 - The :mod:`imp` package is pending deprecation in favor of :mod:`importlib`. + The :mod:`imp` module is deprecated in favor of :mod:`importlib`. .. index:: statement: import From webhook-mailer at python.org Sat Jun 13 18:55:57 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sat, 13 Jun 2020 22:55:57 -0000 Subject: [Python-checkins] bpo-40855: Fix ignored mu and xbar parameters (GH-20835) Message-ID: https://github.com/python/cpython/commit/d71ab4f73887a6e2b380ddbbfe35b600d236fd4a commit: d71ab4f73887a6e2b380ddbbfe35b600d236fd4a branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-13T15:55:52-07:00 summary: bpo-40855: Fix ignored mu and xbar parameters (GH-20835) files: A Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst M Lib/statistics.py M Lib/test/test_statistics.py diff --git a/Lib/statistics.py b/Lib/statistics.py index c76a6ca519e40..93a46334649df 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -682,8 +682,10 @@ def _ss(data, c=None): calculated from ``c`` as given. Use the second case with care, as it can lead to garbage results. 
""" - if c is None: - c = mean(data) + if c is not None: + T, total, count = _sum((x-c)**2 for x in data) + return (T, total) + c = mean(data) T, total, count = _sum((x-c)**2 for x in data) # The following sum should mathematically equal zero, but due to rounding # error may not. diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index 5c3b1fdd8b110..bf415dda557e6 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -2089,6 +2089,10 @@ def test_decimals(self): self.assertEqual(result, exact) self.assertIsInstance(result, Decimal) + def test_center_not_at_mean(self): + data = (1.0, 2.0) + self.assertEqual(self.func(data), 0.5) + self.assertEqual(self.func(data, xbar=2.0), 1.0) class TestPStdev(VarianceStdevMixin, NumericTestCase): # Tests for population standard deviation. @@ -2101,6 +2105,11 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.pvariance(data)) self.assertEqual(self.func(data), expected) + def test_center_not_at_mean(self): + # See issue: 40855 + data = (3, 6, 7, 10) + self.assertEqual(self.func(data), 2.5) + self.assertEqual(self.func(data, mu=0.5), 6.5) class TestStdev(VarianceStdevMixin, NumericTestCase): # Tests for sample standard deviation. @@ -2118,6 +2127,9 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.variance(data)) self.assertEqual(self.func(data), expected) + def test_center_not_at_mean(self): + data = (1.0, 2.0) + self.assertEqual(self.func(data, xbar=2.0), 1.0) class TestGeometricMean(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst b/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst new file mode 100644 index 0000000000000..201d510327a47 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst @@ -0,0 +1,2 @@ +The standard deviation and variance functions in the statistics module were +ignoring their mu and xbar arguments. From webhook-mailer at python.org Sat Jun 13 19:57:25 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 13 Jun 2020 23:57:25 -0000 Subject: [Python-checkins] bpo-40855: Fix ignored mu and xbar parameters (GH-20835) (GH-20863) Message-ID: https://github.com/python/cpython/commit/811e040b6e0241339545c2f055db8259b408802f commit: 811e040b6e0241339545c2f055db8259b408802f branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-13T16:57:17-07:00 summary: bpo-40855: Fix ignored mu and xbar parameters (GH-20835) (GH-20863) files: A Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst M Lib/statistics.py M Lib/test/test_statistics.py diff --git a/Lib/statistics.py b/Lib/statistics.py index 1e95c0b6639f1..c5c6e47fb3f3e 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -682,8 +682,10 @@ def _ss(data, c=None): calculated from ``c`` as given. Use the second case with care, as it can lead to garbage results. """ - if c is None: - c = mean(data) + if c is not None: + T, total, count = _sum((x-c)**2 for x in data) + return (T, total) + c = mean(data) T, total, count = _sum((x-c)**2 for x in data) # The following sum should mathematically equal zero, but due to rounding # error may not. 
diff --git a/Lib/test/test_statistics.py b/Lib/test/test_statistics.py index a9a427bc8d972..5b8ad874a9090 100644 --- a/Lib/test/test_statistics.py +++ b/Lib/test/test_statistics.py @@ -2029,6 +2029,10 @@ def test_decimals(self): self.assertEqual(result, exact) self.assertIsInstance(result, Decimal) + def test_center_not_at_mean(self): + data = (1.0, 2.0) + self.assertEqual(self.func(data), 0.5) + self.assertEqual(self.func(data, xbar=2.0), 1.0) class TestPStdev(VarianceStdevMixin, NumericTestCase): # Tests for population standard deviation. @@ -2041,6 +2045,11 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.pvariance(data)) self.assertEqual(self.func(data), expected) + def test_center_not_at_mean(self): + # See issue: 40855 + data = (3, 6, 7, 10) + self.assertEqual(self.func(data), 2.5) + self.assertEqual(self.func(data, mu=0.5), 6.5) class TestStdev(VarianceStdevMixin, NumericTestCase): # Tests for sample standard deviation. @@ -2058,6 +2067,9 @@ def test_compare_to_variance(self): expected = math.sqrt(statistics.variance(data)) self.assertEqual(self.func(data), expected) + def test_center_not_at_mean(self): + data = (1.0, 2.0) + self.assertEqual(self.func(data, xbar=2.0), 1.0) class TestGeometricMean(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst b/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst new file mode 100644 index 0000000000000..201d510327a47 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst @@ -0,0 +1,2 @@ +The standard deviation and variance functions in the statistics module were +ignoring their mu and xbar arguments. From webhook-mailer at python.org Sat Jun 13 22:17:36 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Sun, 14 Jun 2020 02:17:36 -0000 Subject: [Python-checkins] Some reformatting (suggested by Black) and minor factoring. (GH-20865) Message-ID: https://github.com/python/cpython/commit/5aad027db9618f22f6fa2274e05dd50f928d2ed7 commit: 5aad027db9618f22f6fa2274e05dd50f928d2ed7 branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-13T19:17:28-07:00 summary: Some reformatting (suggested by Black) and minor factoring. 
(GH-20865) files: M Lib/statistics.py diff --git a/Lib/statistics.py b/Lib/statistics.py index 93a46334649df..f9d3802ec5f83 100644 --- a/Lib/statistics.py +++ b/Lib/statistics.py @@ -163,7 +163,7 @@ def _sum(data, start=0): T = _coerce(int, type(start)) for typ, values in groupby(data, type): T = _coerce(T, typ) # or raise TypeError - for n,d in map(_exact_ratio, values): + for n, d in map(_exact_ratio, values): count += 1 partials[d] = partials_get(d, 0) + n if None in partials: @@ -261,7 +261,7 @@ def _convert(value, T): return T(value) except TypeError: if issubclass(T, Decimal): - return T(value.numerator)/T(value.denominator) + return T(value.numerator) / T(value.denominator) else: raise @@ -277,8 +277,8 @@ def _find_lteq(a, x): def _find_rteq(a, l, x): 'Locate the rightmost value exactly equal to x' i = bisect_right(a, x, lo=l) - if i != (len(a)+1) and a[i-1] == x: - return i-1 + if i != (len(a) + 1) and a[i - 1] == x: + return i - 1 raise ValueError @@ -315,7 +315,7 @@ def mean(data): raise StatisticsError('mean requires at least one data point') T, total, count = _sum(data) assert count == n - return _convert(total/n, T) + return _convert(total / n, T) def fmean(data): @@ -403,11 +403,11 @@ def harmonic_mean(data): else: raise TypeError('unsupported type') try: - T, total, count = _sum(1/x for x in _fail_neg(data, errmsg)) + T, total, count = _sum(1 / x for x in _fail_neg(data, errmsg)) except ZeroDivisionError: return 0 assert count == n - return _convert(n/total, T) + return _convert(n / total, T) # FIXME: investigate ways to calculate medians without sorting? Quickselect? @@ -428,11 +428,11 @@ def median(data): n = len(data) if n == 0: raise StatisticsError("no median for empty data") - if n%2 == 1: - return data[n//2] + if n % 2 == 1: + return data[n // 2] else: - i = n//2 - return (data[i - 1] + data[i])/2 + i = n // 2 + return (data[i - 1] + data[i]) / 2 def median_low(data): @@ -451,10 +451,10 @@ def median_low(data): n = len(data) if n == 0: raise StatisticsError("no median for empty data") - if n%2 == 1: - return data[n//2] + if n % 2 == 1: + return data[n // 2] else: - return data[n//2 - 1] + return data[n // 2 - 1] def median_high(data): @@ -473,7 +473,7 @@ def median_high(data): n = len(data) if n == 0: raise StatisticsError("no median for empty data") - return data[n//2] + return data[n // 2] def median_grouped(data, interval=1): @@ -510,15 +510,15 @@ class 3.5-4.5, and interpolation is used to estimate it. return data[0] # Find the value at the midpoint. Remember this corresponds to the # centre of the class interval. - x = data[n//2] + x = data[n // 2] for obj in (x, interval): if isinstance(obj, (str, bytes)): raise TypeError('expected number but got %r' % obj) try: - L = x - interval/2 # The lower limit of the median interval. + L = x - interval / 2 # The lower limit of the median interval. except TypeError: # Mixed type. For now we just coerce to float. - L = float(x) - float(interval)/2 + L = float(x) - float(interval) / 2 # Uses bisection search to search for x in data with log(n) time complexity # Find the position of leftmost occurrence of x in data @@ -528,7 +528,7 @@ class 3.5-4.5, and interpolation is used to estimate it. l2 = _find_rteq(data, l1, x) cf = l1 f = l2 - l1 + 1 - return L + interval*(n/2 - cf)/f + return L + interval * (n / 2 - cf) / f def mode(data): @@ -554,8 +554,7 @@ def mode(data): If *data* is empty, ``mode``, raises StatisticsError. 
""" - data = iter(data) - pairs = Counter(data).most_common(1) + pairs = Counter(iter(data)).most_common(1) try: return pairs[0][0] except IndexError: @@ -597,7 +596,7 @@ def multimode(data): # For sample data where there is a positive probability for values # beyond the range of the data, the R6 exclusive method is a # reasonable choice. Consider a random sample of nine values from a -# population with a uniform distribution from 0.0 to 100.0. The +# population with a uniform distribution from 0.0 to 1.0. The # distribution of the third ranked sample point is described by # betavariate(alpha=3, beta=7) which has mode=0.250, median=0.286, and # mean=0.300. Only the latter (which corresponds with R6) gives the @@ -643,9 +642,8 @@ def quantiles(data, *, n=4, method='exclusive'): m = ld - 1 result = [] for i in range(1, n): - j = i * m // n - delta = i*m - j*n - interpolated = (data[j] * (n - delta) + data[j+1] * delta) / n + j, delta = divmod(i * m, n) + interpolated = (data[j] * (n - delta) + data[j + 1] * delta) / n result.append(interpolated) return result if method == 'exclusive': @@ -655,7 +653,7 @@ def quantiles(data, *, n=4, method='exclusive'): j = i * m // n # rescale i to m/n j = 1 if j < 1 else ld-1 if j > ld-1 else j # clamp to 1 .. ld-1 delta = i*m - j*n # exact integer math - interpolated = (data[j-1] * (n - delta) + data[j] * delta) / n + interpolated = (data[j - 1] * (n - delta) + data[j] * delta) / n result.append(interpolated) return result raise ValueError(f'Unknown method: {method!r}') @@ -689,9 +687,9 @@ def _ss(data, c=None): T, total, count = _sum((x-c)**2 for x in data) # The following sum should mathematically equal zero, but due to rounding # error may not. - U, total2, count2 = _sum((x-c) for x in data) + U, total2, count2 = _sum((x - c) for x in data) assert T == U and count == count2 - total -= total2**2/len(data) + total -= total2 ** 2 / len(data) assert not total < 0, 'negative sum of square deviations: %f' % total return (T, total) @@ -740,7 +738,7 @@ def variance(data, xbar=None): if n < 2: raise StatisticsError('variance requires at least two data points') T, ss = _ss(data, xbar) - return _convert(ss/(n-1), T) + return _convert(ss / (n - 1), T) def pvariance(data, mu=None): @@ -784,7 +782,7 @@ def pvariance(data, mu=None): if n < 1: raise StatisticsError('pvariance requires at least one data point') T, ss = _ss(data, mu) - return _convert(ss/n, T) + return _convert(ss / n, T) def stdev(data, xbar=None): @@ -993,7 +991,7 @@ def overlap(self, other): if not isinstance(other, NormalDist): raise TypeError('Expected another NormalDist instance') X, Y = self, other - if (Y._sigma, Y._mu) < (X._sigma, X._mu): # sort to assure commutativity + if (Y._sigma, Y._mu) < (X._sigma, X._mu): # sort to assure commutativity X, Y = Y, X X_var, Y_var = X.variance, Y.variance if not X_var or not Y_var: From webhook-mailer at python.org Sun Jun 14 03:44:11 2020 From: webhook-mailer at python.org (Fantix King) Date: Sun, 14 Jun 2020 07:44:11 -0000 Subject: [Python-checkins] bpo-30064: Fix slow asyncio sock test (GH-20868) Message-ID: https://github.com/python/cpython/commit/8f04a84755babe516ebb5304904ea7c15b865c80 commit: 8f04a84755babe516ebb5304904ea7c15b865c80 branch: master author: Fantix King committer: GitHub date: 2020-06-14T00:43:57-07:00 summary: bpo-30064: Fix slow asyncio sock test (GH-20868) Using a log2n way to fill a much smaller buffer, and receiving in a cleaner way with EOF. 
The failing test was reproducible using the following command thanks to @aeros : ```bash ./python -m test test_asyncio.test_sock_lowlevel --match test_sock_client_racing -j100 -F -v ``` According to test results, we may still need to bump the timeout: https://github.com/python/cpython/blob/5aad027db9618f22f6fa2274e05dd50f928d2ed7/Lib/test/test_asyncio/test_sock_lowlevel.py#L256-L257 files: M Lib/test/test_asyncio/test_sock_lowlevel.py diff --git a/Lib/test/test_asyncio/test_sock_lowlevel.py b/Lib/test/test_asyncio/test_sock_lowlevel.py index 2c8ce6b657c14..e339ee9a4fc49 100644 --- a/Lib/test/test_asyncio/test_sock_lowlevel.py +++ b/Lib/test/test_asyncio/test_sock_lowlevel.py @@ -166,6 +166,7 @@ async def _basetest_sock_send_racing(self, listener, sock): listener.listen(1) # make connection + sock.setsockopt(socket.SOL_SOCKET, socket.SO_SNDBUF, 1024) sock.setblocking(False) task = asyncio.create_task( self.loop.sock_connect(sock, listener.getsockname())) @@ -176,10 +177,13 @@ async def _basetest_sock_send_racing(self, listener, sock): with server: await task - # fill the buffer - with self.assertRaises(BlockingIOError): - while True: - sock.send(b' ' * 5) + # fill the buffer until sending 5 chars would block + size = 8192 + while size >= 4: + with self.assertRaises(BlockingIOError): + while True: + sock.send(b' ' * size) + size = int(size / 2) # cancel a blocked sock_sendall task = asyncio.create_task( @@ -187,19 +191,21 @@ async def _basetest_sock_send_racing(self, listener, sock): await asyncio.sleep(0) task.cancel() - # clear the buffer - async def recv_until(): - data = b'' - while not data: - data = await self.loop.sock_recv(server, 1024) - data = data.strip() - return data - task = asyncio.create_task(recv_until()) + # receive everything that is not a space + async def recv_all(): + rv = b'' + while True: + buf = await self.loop.sock_recv(server, 8192) + if not buf: + return rv + rv += buf.strip() + task = asyncio.create_task(recv_all()) - # immediately register another sock_sendall + # immediately make another sock_sendall call await self.loop.sock_sendall(sock, b'world') + sock.shutdown(socket.SHUT_WR) data = await task - # ProactorEventLoop could deliver hello + # ProactorEventLoop could deliver hello, so endswith is necessary self.assertTrue(data.endswith(b'world')) # After the first connect attempt before the listener is ready, From webhook-mailer at python.org Sun Jun 14 13:23:56 2020 From: webhook-mailer at python.org (kevin seelbach) Date: Sun, 14 Jun 2020 17:23:56 -0000 Subject: [Python-checkins] Fixes dead links to Django's logging config docs (GH-20823) Message-ID: https://github.com/python/cpython/commit/714217f9561507bbc7218a02089d0e1da0239372 commit: 714217f9561507bbc7218a02089d0e1da0239372 branch: master author: kevin seelbach <2671980+kevinseelbach at users.noreply.github.com> committer: GitHub date: 2020-06-14T10:23:47-07:00 summary: Fixes dead links to Django's logging config docs (GH-20823) Fixes two outdated URLs to point at the current "stable" version of Django's logging documentation. 
Automerge-Triggered-By: @vsajip files: M Doc/howto/logging-cookbook.rst diff --git a/Doc/howto/logging-cookbook.rst b/Doc/howto/logging-cookbook.rst index 17f4ff6e474c2..de0f834551f5d 100644 --- a/Doc/howto/logging-cookbook.rst +++ b/Doc/howto/logging-cookbook.rst @@ -1368,7 +1368,7 @@ An example dictionary-based configuration ----------------------------------------- Below is an example of a logging configuration dictionary - it's taken from -the `documentation on the Django project `_. +the `documentation on the Django project `_. This dictionary is passed to :func:`~config.dictConfig` to put the configuration into effect:: LOGGING = { @@ -1424,7 +1424,7 @@ This dictionary is passed to :func:`~config.dictConfig` to put the configuration } For more information about this configuration, you can see the `relevant -section `_ +section `_ of the Django documentation. .. _cookbook-rotator-namer: From webhook-mailer at python.org Sun Jun 14 21:05:27 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 15 Jun 2020 01:05:27 -0000 Subject: [Python-checkins] bpo-40890: Fix compiler warning in dictobject.c (GH-20876) Message-ID: https://github.com/python/cpython/commit/10c3b2120afa01b2c310ac50e99d8b98c943b0a2 commit: 10c3b2120afa01b2c310ac50e99d8b98c943b0a2 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-15T02:05:20+01:00 summary: bpo-40890: Fix compiler warning in dictobject.c (GH-20876) files: M Objects/dictobject.c diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 48e96a09a5f87..55bf4aefbbeac 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -4123,8 +4123,7 @@ _PyDictView_New(PyObject *dict, PyTypeObject *type) } static PyObject * -dictview_mapping(PyObject *view) -{ +dictview_mapping(PyObject *view, void *Py_UNUSED(ignored)) { assert(view != NULL); assert(PyDictKeys_Check(view) || PyDictValues_Check(view) @@ -4134,7 +4133,7 @@ dictview_mapping(PyObject *view) } static PyGetSetDef dictview_getset[] = { - {"mapping", (getter)dictview_mapping, (setter)NULL, + {"mapping", dictview_mapping, (setter)NULL, "dictionary that this view refers to", NULL}, {0} }; From webhook-mailer at python.org Sun Jun 14 22:55:31 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 15 Jun 2020 02:55:31 -0000 Subject: [Python-checkins] Include soft keywords in keyword.py (GH-20877) Message-ID: https://github.com/python/cpython/commit/78319e373d57cd4da67660f888aa7092efbd6f24 commit: 78319e373d57cd4da67660f888aa7092efbd6f24 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-15T03:55:15+01:00 summary: Include soft keywords in keyword.py (GH-20877) files: M Lib/keyword.py M Tools/peg_generator/pegen/c_generator.py M Tools/peg_generator/pegen/keywordgen.py diff --git a/Lib/keyword.py b/Lib/keyword.py index b6a9982570211..ccc951500f6d8 100644 --- a/Lib/keyword.py +++ b/Lib/keyword.py @@ -13,7 +13,7 @@ Alternatively, you can run 'make regen-keyword'. 
""" -__all__ = ["iskeyword", "kwlist"] +__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] kwlist = [ 'False', @@ -53,4 +53,9 @@ 'yield' ] +softkwlist = [ + +] + iskeyword = frozenset(kwlist).__contains__ +issoftkeyword = frozenset(softkwlist).__contains__ diff --git a/Tools/peg_generator/pegen/c_generator.py b/Tools/peg_generator/pegen/c_generator.py index ce1d6bb7bf355..58a44fbe67e8b 100644 --- a/Tools/peg_generator/pegen/c_generator.py +++ b/Tools/peg_generator/pegen/c_generator.py @@ -105,6 +105,7 @@ def __init__( self.non_exact_tokens = non_exact_tokens self.cache: Dict[Any, FunctionCall] = {} self.keyword_cache: Dict[str, int] = {} + self.soft_keywords: Set[str] = set() def keyword_helper(self, keyword: str) -> FunctionCall: if keyword not in self.keyword_cache: @@ -119,6 +120,7 @@ def keyword_helper(self, keyword: str) -> FunctionCall: ) def soft_keyword_helper(self, value: str) -> FunctionCall: + self.soft_keywords.add(value.replace('"', "")) return FunctionCall( assigned_variable="_keyword", function="_PyPegen_expect_soft_keyword", diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py index 8684944096654..639f01bf2373e 100644 --- a/Tools/peg_generator/pegen/keywordgen.py +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -21,13 +21,18 @@ Alternatively, you can run 'make regen-keyword'. """ -__all__ = ["iskeyword", "kwlist"] +__all__ = ["iskeyword", "issoftkeyword", "kwlist", "softkwlist"] kwlist = [ - {keywords} +{keywords} +] + +softkwlist = [ +{soft_keywords} ] iskeyword = frozenset(kwlist).__contains__ +issoftkeyword = frozenset(softkwlist).__contains__ '''.lstrip() EXTRA_KEYWORDS = ["async", "await"] @@ -60,9 +65,11 @@ def main(): with args.keyword_file as thefile: all_keywords = sorted(list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS) + all_soft_keywords = sorted(gen.callmakervisitor.soft_keywords) - keywords = ",\n ".join(map(repr, all_keywords)) - thefile.write(TEMPLATE.format(keywords=keywords)) + keywords = " " + ",\n ".join(map(repr, all_keywords)) + soft_keywords = " " + ",\n ".join(map(repr, all_soft_keywords)) + thefile.write(TEMPLATE.format(keywords=keywords, soft_keywords=soft_keywords)) if __name__ == "__main__": From webhook-mailer at python.org Sun Jun 14 23:33:38 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 15 Jun 2020 03:33:38 -0000 Subject: [Python-checkins] Fix trailing whitespace in keyword.py (GH-20881) Message-ID: https://github.com/python/cpython/commit/5fc4f8ae68aecf07f2ae4029dbcf997027489944 commit: 5fc4f8ae68aecf07f2ae4029dbcf997027489944 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-15T04:33:33+01:00 summary: Fix trailing whitespace in keyword.py (GH-20881) files: M Tools/peg_generator/pegen/keywordgen.py diff --git a/Tools/peg_generator/pegen/keywordgen.py b/Tools/peg_generator/pegen/keywordgen.py index 639f01bf2373e..53638b17e61c6 100644 --- a/Tools/peg_generator/pegen/keywordgen.py +++ b/Tools/peg_generator/pegen/keywordgen.py @@ -67,8 +67,8 @@ def main(): all_keywords = sorted(list(gen.callmakervisitor.keyword_cache.keys()) + EXTRA_KEYWORDS) all_soft_keywords = sorted(gen.callmakervisitor.soft_keywords) - keywords = " " + ",\n ".join(map(repr, all_keywords)) - soft_keywords = " " + ",\n ".join(map(repr, all_soft_keywords)) + keywords = "" if not all_keywords else " " + ",\n ".join(map(repr, all_keywords)) + soft_keywords = "" if not all_soft_keywords else " " + ",\n ".join(map(repr, all_soft_keywords)) 
thefile.write(TEMPLATE.format(keywords=keywords, soft_keywords=soft_keywords)) From webhook-mailer at python.org Mon Jun 15 08:33:53 2020 From: webhook-mailer at python.org (Niklas Fiekas) Date: Mon, 15 Jun 2020 12:33:53 -0000 Subject: [Python-checkins] bpo-29782: Consolidate _Py_Bit_Length() (GH-20739) Message-ID: https://github.com/python/cpython/commit/794e7d1ab2d7afe70fe0dd87ca8174ac860413e4 commit: 794e7d1ab2d7afe70fe0dd87ca8174ac860413e4 branch: master author: Niklas Fiekas committer: GitHub date: 2020-06-15T14:33:48+02:00 summary: bpo-29782: Consolidate _Py_Bit_Length() (GH-20739) In GH-2866, _Py_Bit_Length() was added to pymath.h for lack of a better location. GH-20518 added a more appropriate header file for bit utilities. It also shows how to properly use intrinsics. This allows reconsidering bpo-29782. * Move the function to the new header. * Changed return type to match __builtin_clzl() and reviewed usage. * Use intrinsics where available. * Pick a fallback implementation suitable for inlining. files: M Include/internal/pycore_bitutils.h M Include/pymath.h M Modules/_testinternalcapi.c M Modules/mathmodule.c M Objects/longobject.c M Python/pymath.c diff --git a/Include/internal/pycore_bitutils.h b/Include/internal/pycore_bitutils.h index 36ffe23b9ff26..0bd3270fe82e5 100644 --- a/Include/internal/pycore_bitutils.h +++ b/Include/internal/pycore_bitutils.h @@ -7,8 +7,8 @@ - _Py_bswap64(uint64_t) */ -#ifndef Py_INTERNAL_BSWAP_H -#define Py_INTERNAL_BSWAP_H +#ifndef Py_INTERNAL_BITUTILS_H +#define Py_INTERNAL_BITUTILS_H #ifdef __cplusplus extern "C" { #endif @@ -131,8 +131,47 @@ _Py_popcount32(uint32_t x) } +// Return the index of the most significant 1 bit in 'x'. This is the smallest +// integer k such that x < 2**k. Equivalent to floor(log2(x)) + 1 for x != 0. +static inline int +_Py_bit_length(unsigned long x) +{ +#if (defined(__clang__) || defined(__GNUC__)) + if (x != 0) { + // __builtin_clzl() is available since GCC 3.4. + // Undefined behavior for x == 0. + return (int)sizeof(unsigned long) * 8 - __builtin_clzl(x); + } + else { + return 0; + } +#elif defined(_MSC_VER) + // _BitScanReverse() is documented to search 32 bits. + Py_BUILD_ASSERT(sizeof(unsigned long) <= 4); + unsigned long msb; + if (_BitScanReverse(&msb, x)) { + return (int)msb + 1; + } + else { + return 0; + } +#else + const int BIT_LENGTH_TABLE[32] = { + 0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + }; + int msb = 0; + while (x >= 32) { + msb += 6; + x >>= 6; + } + msb += BIT_LENGTH_TABLE[x]; + return msb; +#endif +} + + #ifdef __cplusplus } #endif -#endif /* !Py_INTERNAL_BSWAP_H */ - +#endif /* !Py_INTERNAL_BITUTILS_H */ diff --git a/Include/pymath.h b/Include/pymath.h index 63ca972784e31..f869724334a4c 100644 --- a/Include/pymath.h +++ b/Include/pymath.h @@ -227,12 +227,4 @@ PyAPI_FUNC(void) _Py_set_387controlword(unsigned short); * behavior. */ #define _Py_InIntegralTypeRange(type, v) (_Py_IntegralTypeMin(type) <= v && v <= _Py_IntegralTypeMax(type)) -/* Return the smallest integer k such that n < 2**k, or 0 if n == 0. - * Equivalent to floor(log2(x))+1. 
Also equivalent to: bitwidth_of_type - - * count_leading_zero_bits(x) - */ -#ifndef Py_LIMITED_API -PyAPI_FUNC(unsigned int) _Py_bit_length(unsigned long d); -#endif - #endif /* Py_PYMATH_H */ diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 6d5af5917f1f0..7970e2f4f443f 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -102,6 +102,45 @@ test_popcount(PyObject *self, PyObject *Py_UNUSED(args)) } +static int +check_bit_length(unsigned long x, int expected) +{ + // Use volatile to prevent the compiler to optimize out the whole test + volatile unsigned long u = x; + int len = _Py_bit_length(u); + if (len != expected) { + PyErr_Format(PyExc_AssertionError, + "_Py_bit_length(%lu) returns %i, expected %i", + x, len, expected); + return -1; + } + return 0; +} + + +static PyObject* +test_bit_length(PyObject *self, PyObject *Py_UNUSED(args)) +{ +#define CHECK(X, RESULT) \ + do { \ + if (check_bit_length(X, RESULT) < 0) { \ + return NULL; \ + } \ + } while (0) + + CHECK(0, 0); + CHECK(1, 1); + CHECK(0x1000, 13); + CHECK(0x1234, 13); + CHECK(0x54321, 19); + CHECK(0x7FFFFFFF, 31); + CHECK(0xFFFFFFFF, 32); + Py_RETURN_NONE; + +#undef CHECK +} + + #define TO_PTR(ch) ((void*)(uintptr_t)ch) #define FROM_PTR(ptr) ((uintptr_t)ptr) #define VALUE(key) (1 + ((int)(key) - 'a')) @@ -197,6 +236,7 @@ static PyMethodDef TestMethods[] = { {"get_recursion_depth", get_recursion_depth, METH_NOARGS}, {"test_bswap", test_bswap, METH_NOARGS}, {"test_popcount", test_popcount, METH_NOARGS}, + {"test_bit_length", test_bit_length, METH_NOARGS}, {"test_hashtable", test_hashtable, METH_NOARGS}, {NULL, NULL} /* sentinel */ }; diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index cb05ce7c50962..4450ce1894102 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -53,6 +53,7 @@ raised for division by zero and mod by zero. */ #include "Python.h" +#include "pycore_bitutils.h" // _Py_bit_length() #include "pycore_dtoa.h" #include "_math.h" diff --git a/Objects/longobject.c b/Objects/longobject.c index dead3e306943c..d92a9c56a7208 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -695,6 +695,13 @@ _PyLong_Sign(PyObject *vv) return Py_SIZE(v) == 0 ? 0 : (Py_SIZE(v) < 0 ? -1 : 1); } +static int +bit_length_digit(digit x) +{ + Py_BUILD_ASSERT(PyLong_SHIFT <= sizeof(unsigned long) * 8); + return _Py_bit_length((unsigned long)x); +} + size_t _PyLong_NumBits(PyObject *vv) { @@ -712,7 +719,7 @@ _PyLong_NumBits(PyObject *vv) if ((size_t)(ndigits - 1) > SIZE_MAX / (size_t)PyLong_SHIFT) goto Overflow; result = (size_t)(ndigits - 1) * (size_t)PyLong_SHIFT; - msd_bits = _Py_bit_length(msd); + msd_bits = bit_length_digit(msd); if (SIZE_MAX - msd_bits < result) goto Overflow; result += msd_bits; @@ -1822,7 +1829,7 @@ long_format_binary(PyObject *aa, int base, int alternate, return -1; } size_a_in_bits = (size_a - 1) * PyLong_SHIFT + - _Py_bit_length(a->ob_digit[size_a - 1]); + bit_length_digit(a->ob_digit[size_a - 1]); /* Allow 1 character for a '-' sign. */ sz = negative + (size_a_in_bits + (bits - 1)) / bits; } @@ -2642,7 +2649,7 @@ x_divrem(PyLongObject *v1, PyLongObject *w1, PyLongObject **prem) /* normalize: shift w1 left so that its top digit is >= PyLong_BASE/2. shift v1 left by the same amount. Results go into w and v. 
*/ - d = PyLong_SHIFT - _Py_bit_length(w1->ob_digit[size_w-1]); + d = PyLong_SHIFT - bit_length_digit(w1->ob_digit[size_w-1]); carry = v_lshift(w->ob_digit, w1->ob_digit, size_w, d); assert(carry == 0); carry = v_lshift(v->ob_digit, v1->ob_digit, size_v, d); @@ -2764,7 +2771,7 @@ _PyLong_Frexp(PyLongObject *a, Py_ssize_t *e) *e = 0; return 0.0; } - a_bits = _Py_bit_length(a->ob_digit[a_size-1]); + a_bits = bit_length_digit(a->ob_digit[a_size-1]); /* The following is an overflow-free version of the check "if ((a_size - 1) * PyLong_SHIFT + a_bits > PY_SSIZE_T_MAX) ..." */ if (a_size >= (PY_SSIZE_T_MAX - 1) / PyLong_SHIFT + 1 && @@ -3857,8 +3864,8 @@ long_true_divide(PyObject *v, PyObject *w) /* Extreme underflow */ goto underflow_or_zero; /* Next line is now safe from overflowing a Py_ssize_t */ - diff = diff * PyLong_SHIFT + _Py_bit_length(a->ob_digit[a_size - 1]) - - _Py_bit_length(b->ob_digit[b_size - 1]); + diff = diff * PyLong_SHIFT + bit_length_digit(a->ob_digit[a_size - 1]) - + bit_length_digit(b->ob_digit[b_size - 1]); /* Now diff = a_bits - b_bits. */ if (diff > DBL_MAX_EXP) goto overflow; @@ -3934,7 +3941,7 @@ long_true_divide(PyObject *v, PyObject *w) } x_size = Py_ABS(Py_SIZE(x)); assert(x_size > 0); /* result of division is never zero */ - x_bits = (x_size-1)*PyLong_SHIFT+_Py_bit_length(x->ob_digit[x_size-1]); + x_bits = (x_size-1)*PyLong_SHIFT+bit_length_digit(x->ob_digit[x_size-1]); /* The number of extra bits that have to be rounded away. */ extra_bits = Py_MAX(x_bits, DBL_MIN_EXP - shift) - DBL_MANT_DIG; @@ -4748,7 +4755,7 @@ _PyLong_GCD(PyObject *aarg, PyObject *barg) alloc_b = Py_SIZE(b); /* reduce until a fits into 2 digits */ while ((size_a = Py_SIZE(a)) > 2) { - nbits = _Py_bit_length(a->ob_digit[size_a-1]); + nbits = bit_length_digit(a->ob_digit[size_a-1]); /* extract top 2*PyLong_SHIFT bits of a into x, along with corresponding bits of b into y */ size_b = Py_SIZE(b); @@ -5269,7 +5276,7 @@ int_bit_length_impl(PyObject *self) return PyLong_FromLong(0); msd = ((PyLongObject *)self)->ob_digit[ndigits-1]; - msd_bits = _Py_bit_length(msd); + msd_bits = bit_length_digit(msd); if (ndigits <= PY_SSIZE_T_MAX/PyLong_SHIFT) return PyLong_FromSsize_t((ndigits-1)*PyLong_SHIFT + msd_bits); diff --git a/Python/pymath.c b/Python/pymath.c index a08a0e796156f..24b804223eef1 100644 --- a/Python/pymath.c +++ b/Python/pymath.c @@ -79,18 +79,3 @@ round(double x) return copysign(y, x); } #endif /* HAVE_ROUND */ - -static const unsigned int BitLengthTable[32] = { - 0, 1, 2, 2, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 4, - 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 -}; - -unsigned int _Py_bit_length(unsigned long d) { - unsigned int d_bits = 0; - while (d >= 32) { - d_bits += 6; - d >>= 6; - } - d_bits += BitLengthTable[d]; - return d_bits; -} From webhook-mailer at python.org Mon Jun 15 09:23:51 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Mon, 15 Jun 2020 13:23:51 -0000 Subject: [Python-checkins] Improve readability and style in parser files (GH-20884) Message-ID: https://github.com/python/cpython/commit/fb61c42361a666f589c59d4d737c2da6817b992b commit: fb61c42361a666f589c59d4d737c2da6817b992b branch: master author: Pablo Galindo committer: GitHub date: 2020-06-15T14:23:43+01:00 summary: Improve readability and style in parser files (GH-20884) files: M Parser/pegen.c M Parser/string_parser.c diff --git a/Parser/pegen.c b/Parser/pegen.c index e29910bf86ed5..4cff7342edbbc 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -67,10 +67,11 @@ _PyPegen_check_barry_as_flufl(Parser *p) 
{ assert(t->type == NOTEQUAL); char* tok_str = PyBytes_AS_STRING(t->bytes); - if (p->flags & PyPARSE_BARRY_AS_BDFL && strcmp(tok_str, "<>")){ + if (p->flags & PyPARSE_BARRY_AS_BDFL && strcmp(tok_str, "<>") != 0) { RAISE_SYNTAX_ERROR("with Barry as BDFL, use '<>' instead of '!='"); return -1; - } else if (!(p->flags & PyPARSE_BARRY_AS_BDFL)) { + } + if (!(p->flags & PyPARSE_BARRY_AS_BDFL)) { return strcmp(tok_str, "!="); } return 0; @@ -245,7 +246,10 @@ raise_decode_error(Parser *p) errtype = "value error"; } if (errtype) { - PyObject *type, *value, *tback, *errstr; + PyObject *type; + PyObject *value; + PyObject *tback; + PyObject *errstr; PyErr_Fetch(&type, &value, &tback); errstr = PyObject_Str(value); if (errstr) { @@ -274,7 +278,9 @@ raise_tokenizer_init_error(PyObject *filename) } PyObject *errstr = NULL; PyObject *tuple = NULL; - PyObject *type, *value, *tback; + PyObject *type; + PyObject *value; + PyObject *tback; PyErr_Fetch(&type, &value, &tback); errstr = PyObject_Str(value); if (!errstr) { @@ -548,7 +554,8 @@ growable_comment_array_deallocate(growable_comment_array *arr) { int _PyPegen_fill_token(Parser *p) { - const char *start, *end; + const char *start; + const char *end; int type = PyTokenizer_Get(p->tok, &start, &end); // Record and skip '# type: ignore' comments @@ -589,9 +596,8 @@ _PyPegen_fill_token(Parser *p) PyErr_NoMemory(); return -1; } - else { - p->tokens = new_tokens; - } + p->tokens = new_tokens; + for (int i = p->size; i < newsize; i++) { p->tokens[i] = PyMem_Malloc(sizeof(Token)); if (p->tokens[i] == NULL) { @@ -615,7 +621,8 @@ _PyPegen_fill_token(Parser *p) int lineno = type == STRING ? p->tok->first_lineno : p->tok->lineno; const char *line_start = type == STRING ? p->tok->multi_line_start : p->tok->line_start; int end_lineno = p->tok->lineno; - int col_offset = -1, end_col_offset = -1; + int col_offset = -1; + int end_col_offset = -1; if (start != NULL && start >= line_start) { col_offset = (int)(start - line_start); } @@ -634,9 +641,8 @@ _PyPegen_fill_token(Parser *p) if (p->tok->done == E_DECODE) { return raise_decode_error(p); } - else { - return tokenizer_error(p); - } + return tokenizer_error(p); + } return 0; @@ -847,33 +853,36 @@ parsenumber_raw(const char *s) return PyLong_FromString(s, (char **)0, 0); } } - else + else { x = PyOS_strtol(s, (char **)&end, 0); + } if (*end == '\0') { - if (errno != 0) + if (errno != 0) { return PyLong_FromString(s, (char **)0, 0); + } return PyLong_FromLong(x); } /* XXX Huge floats may silently fail */ if (imflag) { compl.real = 0.; compl.imag = PyOS_string_to_double(s, (char **)&end, NULL); - if (compl.imag == -1.0 && PyErr_Occurred()) + if (compl.imag == -1.0 && PyErr_Occurred()) { return NULL; + } return PyComplex_FromCComplex(compl); } - else { - dx = PyOS_string_to_double(s, NULL, NULL); - if (dx == -1.0 && PyErr_Occurred()) - return NULL; - return PyFloat_FromDouble(dx); + dx = PyOS_string_to_double(s, NULL, NULL); + if (dx == -1.0 && PyErr_Occurred()) { + return NULL; } + return PyFloat_FromDouble(dx); } static PyObject * parsenumber(const char *s) { - char *dup, *end; + char *dup; + char *end; PyObject *res = NULL; assert(s != NULL); diff --git a/Parser/string_parser.c b/Parser/string_parser.c index cb2332bad0e9d..7d50e43f4e342 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -42,7 +42,8 @@ warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char, static PyObject * decode_utf8(const char **sPtr, const char *end) { - const char *s, *t; + const char *s; + const char *t; 
t = s = *sPtr; while (s < end && (*s & 0x80)) { s++; @@ -54,7 +55,8 @@ decode_utf8(const char **sPtr, const char *end) static PyObject * decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) { - PyObject *v, *u; + PyObject *v; + PyObject *u; char *buf; char *p; const char *end; @@ -86,7 +88,8 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) PyObject *w; int kind; void *data; - Py_ssize_t len, i; + Py_ssize_t w_len; + Py_ssize_t i; w = decode_utf8(&s, end); if (w == NULL) { Py_DECREF(u); @@ -94,8 +97,8 @@ decode_unicode_with_escapes(Parser *parser, const char *s, size_t len, Token *t) } kind = PyUnicode_KIND(w); data = PyUnicode_DATA(w); - len = PyUnicode_GET_LENGTH(w); - for (i = 0; i < len; i++) { + w_len = PyUnicode_GET_LENGTH(w); + for (i = 0; i < w_len; i++) { Py_UCS4 chr = PyUnicode_READ(kind, data, i); sprintf(p, "\\U%08x", chr); p += 10; @@ -169,18 +172,18 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, if (Py_ISALPHA(quote)) { while (!*bytesmode || !*rawmode) { if (quote == 'b' || quote == 'B') { - quote = *++s; + quote =(unsigned char)*++s; *bytesmode = 1; } else if (quote == 'u' || quote == 'U') { - quote = *++s; + quote = (unsigned char)*++s; } else if (quote == 'r' || quote == 'R') { - quote = *++s; + quote = (unsigned char)*++s; *rawmode = 1; } else if (quote == 'f' || quote == 'F') { - quote = *++s; + quote = (unsigned char)*++s; fmode = 1; } else { @@ -370,112 +373,112 @@ static void fstring_shift_arguments(expr_ty parent, arguments_ty args, int linen fstring_shift_seq_locations(parent, args->defaults, lineno, col_offset); } -static void fstring_shift_children_locations(expr_ty n, int lineno, int col_offset) { - switch (n->kind) { +static void fstring_shift_children_locations(expr_ty node, int lineno, int col_offset) { + switch (node->kind) { case BoolOp_kind: - fstring_shift_seq_locations(n, n->v.BoolOp.values, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.BoolOp.values, lineno, col_offset); break; case NamedExpr_kind: - shift_expr(n, n->v.NamedExpr.target, lineno, col_offset); - shift_expr(n, n->v.NamedExpr.value, lineno, col_offset); + shift_expr(node, node->v.NamedExpr.target, lineno, col_offset); + shift_expr(node, node->v.NamedExpr.value, lineno, col_offset); break; case BinOp_kind: - shift_expr(n, n->v.BinOp.left, lineno, col_offset); - shift_expr(n, n->v.BinOp.right, lineno, col_offset); + shift_expr(node, node->v.BinOp.left, lineno, col_offset); + shift_expr(node, node->v.BinOp.right, lineno, col_offset); break; case UnaryOp_kind: - shift_expr(n, n->v.UnaryOp.operand, lineno, col_offset); + shift_expr(node, node->v.UnaryOp.operand, lineno, col_offset); break; case Lambda_kind: - fstring_shift_arguments(n, n->v.Lambda.args, lineno, col_offset); - shift_expr(n, n->v.Lambda.body, lineno, col_offset); + fstring_shift_arguments(node, node->v.Lambda.args, lineno, col_offset); + shift_expr(node, node->v.Lambda.body, lineno, col_offset); break; case IfExp_kind: - shift_expr(n, n->v.IfExp.test, lineno, col_offset); - shift_expr(n, n->v.IfExp.body, lineno, col_offset); - shift_expr(n, n->v.IfExp.orelse, lineno, col_offset); + shift_expr(node, node->v.IfExp.test, lineno, col_offset); + shift_expr(node, node->v.IfExp.body, lineno, col_offset); + shift_expr(node, node->v.IfExp.orelse, lineno, col_offset); break; case Dict_kind: - fstring_shift_seq_locations(n, n->v.Dict.keys, lineno, col_offset); - fstring_shift_seq_locations(n, n->v.Dict.values, lineno, col_offset); 
+ fstring_shift_seq_locations(node, node->v.Dict.keys, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.Dict.values, lineno, col_offset); break; case Set_kind: - fstring_shift_seq_locations(n, n->v.Set.elts, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.Set.elts, lineno, col_offset); break; case ListComp_kind: - shift_expr(n, n->v.ListComp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(n->v.ListComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(n->v.ListComp.generators, i); - fstring_shift_comprehension(n, comp, lineno, col_offset); + shift_expr(node, node->v.ListComp.elt, lineno, col_offset); + for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.ListComp.generators); i < l; i++) { + comprehension_ty comp = asdl_seq_GET(node->v.ListComp.generators, i); + fstring_shift_comprehension(node, comp, lineno, col_offset); } break; case SetComp_kind: - shift_expr(n, n->v.SetComp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(n->v.SetComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(n->v.SetComp.generators, i); - fstring_shift_comprehension(n, comp, lineno, col_offset); + shift_expr(node, node->v.SetComp.elt, lineno, col_offset); + for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.SetComp.generators); i < l; i++) { + comprehension_ty comp = asdl_seq_GET(node->v.SetComp.generators, i); + fstring_shift_comprehension(node, comp, lineno, col_offset); } break; case DictComp_kind: - shift_expr(n, n->v.DictComp.key, lineno, col_offset); - shift_expr(n, n->v.DictComp.value, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(n->v.DictComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(n->v.DictComp.generators, i); - fstring_shift_comprehension(n, comp, lineno, col_offset); + shift_expr(node, node->v.DictComp.key, lineno, col_offset); + shift_expr(node, node->v.DictComp.value, lineno, col_offset); + for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.DictComp.generators); i < l; i++) { + comprehension_ty comp = asdl_seq_GET(node->v.DictComp.generators, i); + fstring_shift_comprehension(node, comp, lineno, col_offset); } break; case GeneratorExp_kind: - shift_expr(n, n->v.GeneratorExp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(n->v.GeneratorExp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(n->v.GeneratorExp.generators, i); - fstring_shift_comprehension(n, comp, lineno, col_offset); + shift_expr(node, node->v.GeneratorExp.elt, lineno, col_offset); + for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.GeneratorExp.generators); i < l; i++) { + comprehension_ty comp = asdl_seq_GET(node->v.GeneratorExp.generators, i); + fstring_shift_comprehension(node, comp, lineno, col_offset); } break; case Await_kind: - shift_expr(n, n->v.Await.value, lineno, col_offset); + shift_expr(node, node->v.Await.value, lineno, col_offset); break; case Yield_kind: - shift_expr(n, n->v.Yield.value, lineno, col_offset); + shift_expr(node, node->v.Yield.value, lineno, col_offset); break; case YieldFrom_kind: - shift_expr(n, n->v.YieldFrom.value, lineno, col_offset); + shift_expr(node, node->v.YieldFrom.value, lineno, col_offset); break; case Compare_kind: - shift_expr(n, n->v.Compare.left, lineno, col_offset); - fstring_shift_seq_locations(n, n->v.Compare.comparators, lineno, col_offset); + shift_expr(node, node->v.Compare.left, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.Compare.comparators, lineno, col_offset); break; case Call_kind: - 
shift_expr(n, n->v.Call.func, lineno, col_offset); - fstring_shift_seq_locations(n, n->v.Call.args, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(n->v.Call.keywords); i < l; i++) { - keyword_ty keyword = asdl_seq_GET(n->v.Call.keywords, i); - shift_expr(n, keyword->value, lineno, col_offset); + shift_expr(node, node->v.Call.func, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.Call.args, lineno, col_offset); + for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.Call.keywords); i < l; i++) { + keyword_ty keyword = asdl_seq_GET(node->v.Call.keywords, i); + shift_expr(node, keyword->value, lineno, col_offset); } break; case Attribute_kind: - shift_expr(n, n->v.Attribute.value, lineno, col_offset); + shift_expr(node, node->v.Attribute.value, lineno, col_offset); break; case Subscript_kind: - shift_expr(n, n->v.Subscript.value, lineno, col_offset); - fstring_shift_slice_locations(n, n->v.Subscript.slice, lineno, col_offset); - shift_expr(n, n->v.Subscript.slice, lineno, col_offset); + shift_expr(node, node->v.Subscript.value, lineno, col_offset); + fstring_shift_slice_locations(node, node->v.Subscript.slice, lineno, col_offset); + shift_expr(node, node->v.Subscript.slice, lineno, col_offset); break; case Starred_kind: - shift_expr(n, n->v.Starred.value, lineno, col_offset); + shift_expr(node, node->v.Starred.value, lineno, col_offset); break; case List_kind: - fstring_shift_seq_locations(n, n->v.List.elts, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.List.elts, lineno, col_offset); break; case Tuple_kind: - fstring_shift_seq_locations(n, n->v.Tuple.elts, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.Tuple.elts, lineno, col_offset); break; case JoinedStr_kind: - fstring_shift_seq_locations(n, n->v.JoinedStr.values, lineno, col_offset); + fstring_shift_seq_locations(node, node->v.JoinedStr.values, lineno, col_offset); break; case FormattedValue_kind: - shift_expr(n, n->v.FormattedValue.value, lineno, col_offset); - if (n->v.FormattedValue.format_spec) { - shift_expr(n, n->v.FormattedValue.format_spec, lineno, col_offset); + shift_expr(node, node->v.FormattedValue.value, lineno, col_offset); + if (node->v.FormattedValue.format_spec) { + shift_expr(node, node->v.FormattedValue.format_spec, lineno, col_offset); } break; default: @@ -710,15 +713,17 @@ fstring_find_literal(Parser *p, const char **str, const char *end, int raw, assert(s == end || *s == '{' || *s == '}'); done: if (literal_start != s) { - if (raw) + if (raw) { *literal = PyUnicode_DecodeUTF8Stateful(literal_start, s - literal_start, NULL, NULL); - else + } else { *literal = decode_unicode_with_escapes(p, literal_start, s - literal_start, t); - if (!*literal) + } + if (!*literal) { return -1; + } } return result; } @@ -790,10 +795,11 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec /* Loop invariants. */ assert(nested_depth >= 0); assert(*str >= expr_start && *str < end); - if (quote_char) + if (quote_char) { assert(string_type == 1 || string_type == 3); - else + } else { assert(string_type == 0); + } ch = **str; /* Nowhere inside an expression is a backslash allowed. 
*/ @@ -890,7 +896,7 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec goto error; } nested_depth--; - int opening = parenstack[nested_depth]; + int opening = (unsigned char)parenstack[nested_depth]; if (!((opening == '(' && ch == ')') || (opening == '[' && ch == ']') || (opening == '{' && ch == '}'))) @@ -915,20 +921,22 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec goto error; } if (nested_depth) { - int opening = parenstack[nested_depth - 1]; + int opening = (unsigned char)parenstack[nested_depth - 1]; RAISE_SYNTAX_ERROR("f-string: unmatched '%c'", opening); goto error; } - if (*str >= end) + if (*str >= end) { goto unexpected_end_of_string; + } /* Compile the expression as soon as possible, so we show errors related to the expression before errors related to the conversion or format_spec. */ simple_expression = fstring_compile_expr(p, expr_start, expr_end, t); - if (!simple_expression) + if (!simple_expression) { goto error; + } /* Check for =, which puts the text value of the expression in expr_text. */ @@ -957,10 +965,11 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec /* Check for a conversion char, if present. */ if (**str == '!') { *str += 1; - if (*str >= end) + if (*str >= end) { goto unexpected_end_of_string; + } - conversion = **str; + conversion = (unsigned char)**str; *str += 1; /* Validate the conversion. */ @@ -974,22 +983,26 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec } /* Check for the format spec, if present. */ - if (*str >= end) + if (*str >= end) { goto unexpected_end_of_string; + } if (**str == ':') { *str += 1; - if (*str >= end) + if (*str >= end) { goto unexpected_end_of_string; + } /* Parse the format spec. */ format_spec = fstring_parse(p, str, end, raw, recurse_lvl+1, first_token, t, last_token); - if (!format_spec) + if (!format_spec) { goto error; + } } - if (*str >= end || **str != '}') + if (*str >= end || **str != '}') { goto unexpected_end_of_string; + } /* We're at a right brace. Consume it. */ assert(*str < end); @@ -1009,8 +1022,9 @@ fstring_find_expr(Parser *p, const char **str, const char *end, int raw, int rec format_spec, first_token->lineno, first_token->col_offset, last_token->end_lineno, last_token->end_col_offset, p->arena); - if (!*expression) + if (!*expression) { goto error; + } return 0; @@ -1059,28 +1073,32 @@ fstring_find_literal_and_expr(Parser *p, const char **str, const char *end, int /* Get any literal string. */ result = fstring_find_literal(p, str, end, raw, literal, recurse_lvl, t); - if (result < 0) + if (result < 0) { goto error; + } assert(result == 0 || result == 1); - if (result == 1) + if (result == 1) { /* We have a literal, but don't look at the expression. */ return 1; + } - if (*str >= end || **str == '}') + if (*str >= end || **str == '}') { /* We're at the end of the string or the end of a nested f-string: no expression. The top-level error case where we expect to be at the end of the string but we're at a '}' is handled later. */ return 0; + } /* We must now be the start of an expression, on a '{'. */ assert(**str == '{'); if (fstring_find_expr(p, str, end, raw, recurse_lvl, expr_text, - expression, first_token, t, last_token) < 0) + expression, first_token, t, last_token) < 0) { goto error; + } return 0; @@ -1099,8 +1117,9 @@ ExprList_check_invariants(ExprList *l) hasn't been deallocated. 
*/ assert(l->size >= 0); assert(l->p != NULL); - if (l->size <= EXPRLIST_N_CACHED) + if (l->size <= EXPRLIST_N_CACHED) { assert(l->data == l->p); + } } #endif @@ -1130,11 +1149,13 @@ ExprList_Append(ExprList *l, expr_ty exp) /* We're still using the cached data. Switch to alloc-ing. */ l->p = PyMem_RawMalloc(sizeof(expr_ty) * new_size); - if (!l->p) + if (!l->p) { return -1; + } /* Copy the cached data into the new buffer. */ - for (i = 0; i < l->size; i++) + for (i = 0; i < l->size; i++) { l->p[i] = l->data[i]; + } } else { /* Just realloc. */ expr_ty *tmp = PyMem_RawRealloc(l->p, sizeof(expr_ty) * new_size); @@ -1184,8 +1205,9 @@ ExprList_Finish(ExprList *l, PyArena *arena) seq = _Py_asdl_seq_new(l->size, arena); if (seq) { Py_ssize_t i; - for (i = 0; i < l->size; i++) + for (i = 0; i < l->size; i++) { asdl_seq_SET(seq, i, l->p[i]); + } } ExprList_Dealloc(l); return seq; @@ -1197,8 +1219,9 @@ ExprList_Finish(ExprList *l, PyArena *arena) static void FstringParser_check_invariants(FstringParser *state) { - if (state->last_str) + if (state->last_str) { assert(PyUnicode_CheckExact(state->last_str)); + } ExprList_check_invariants(&state->expr_list); } #endif @@ -1268,8 +1291,9 @@ _PyPegen_FstringParser_ConcatAndDel(FstringParser *state, PyObject *str) } else { /* Concatenate this with the previous string. */ PyUnicode_AppendAndDel(&state->last_str, str); - if (!state->last_str) + if (!state->last_str) { return -1; + } } FstringParser_check_invariants(state); return 0; @@ -1298,8 +1322,9 @@ _PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char int result = fstring_find_literal_and_expr(p, str, end, raw, recurse_lvl, &literal, &expr_text, &expression, first_token, t, last_token); - if (result < 0) + if (result < 0) { return -1; + } /* Add the literal, if any. */ if (literal && _PyPegen_FstringParser_ConcatAndDel(state, literal) < 0) { @@ -1318,12 +1343,14 @@ _PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char and expression, while ignoring the expression this time. This is used for un-doubling braces, as an optimization. */ - if (result == 1) + if (result == 1) { continue; + } - if (!expression) + if (!expression) { /* We're done with this f-string. */ break; + } /* We know we have an expression. Convert any existing string to a Constant node. */ @@ -1331,13 +1358,15 @@ _PyPegen_FstringParser_ConcatFstring(Parser *p, FstringParser *state, const char /* Do nothing. No previous literal. */ } else { /* Convert the existing last_str literal to a Constant node. */ - expr_ty str = make_str_node_and_del(p, &state->last_str, first_token, last_token); - if (!str || ExprList_Append(&state->expr_list, str) < 0) + expr_ty last_str = make_str_node_and_del(p, &state->last_str, first_token, last_token); + if (!last_str || ExprList_Append(&state->expr_list, last_str) < 0) { return -1; + } } - if (ExprList_Append(&state->expr_list, expression) < 0) + if (ExprList_Append(&state->expr_list, expression) < 0) { return -1; + } } /* If recurse_lvl is zero, then we must be at the end of the @@ -1373,8 +1402,9 @@ _PyPegen_FstringParser_Finish(Parser *p, FstringParser *state, Token* first_toke if (!state->last_str) { /* Create a zero length string. 
*/ state->last_str = PyUnicode_FromStringAndSize(NULL, 0); - if (!state->last_str) + if (!state->last_str) { goto error; + } } return make_str_node_and_del(p, &state->last_str, first_token, last_token); } @@ -1383,15 +1413,17 @@ _PyPegen_FstringParser_Finish(Parser *p, FstringParser *state, Token* first_toke last node in our expression list. */ if (state->last_str) { expr_ty str = make_str_node_and_del(p, &state->last_str, first_token, last_token); - if (!str || ExprList_Append(&state->expr_list, str) < 0) + if (!str || ExprList_Append(&state->expr_list, str) < 0) { goto error; + } } /* This has already been freed. */ assert(state->last_str == NULL); seq = ExprList_Finish(&state->expr_list, p->arena); - if (!seq) + if (!seq) { goto error; + } return _Py_JoinedStr(seq, first_token->lineno, first_token->col_offset, last_token->end_lineno, last_token->end_col_offset, p->arena); From webhook-mailer at python.org Mon Jun 15 10:27:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Jun 2020 14:27:55 -0000 Subject: [Python-checkins] bpo-40910: PyConfig_Clear() clears _orig_argv (GH-20886) Message-ID: https://github.com/python/cpython/commit/e2d47a0568c6da9229580829917fd6aa702133b3 commit: e2d47a0568c6da9229580829917fd6aa702133b3 branch: master author: Victor Stinner committer: GitHub date: 2020-06-15T16:27:47+02:00 summary: bpo-40910: PyConfig_Clear() clears _orig_argv (GH-20886) bpo-40910, bpo-40953: PyConfig_Clear() clears _orig_argv. files: M Python/initconfig.c diff --git a/Python/initconfig.c b/Python/initconfig.c index d8b3df885722f..96169454506cb 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -600,6 +600,8 @@ PyConfig_Clear(PyConfig *config) CLEAR(config->run_module); CLEAR(config->run_filename); CLEAR(config->check_hash_pycs_mode); + + _PyWideStringList_Clear(&config->_orig_argv); #undef CLEAR } From webhook-mailer at python.org Mon Jun 15 10:59:03 2020 From: webhook-mailer at python.org (roger) Date: Mon, 15 Jun 2020 14:59:03 -0000 Subject: [Python-checkins] bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) Message-ID: https://github.com/python/cpython/commit/d8cf3514dd4682419a66f6e834bb384ee34afc95 commit: d8cf3514dd4682419a66f6e834bb384ee34afc95 branch: master author: roger committer: GitHub date: 2020-06-15T07:58:54-07:00 summary: bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) In Python 3.7 the behavior of parse_multipart changed requiring CONTENT-LENGTH header, this fix remove this header as required and fix FieldStorage read_lines_to_outerboundary, by not using limit when it's negative, since by default it's -1 if not content-length and keeps substracting what was read from the file object. Also added a test case for this problem. 
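
A minimal usage sketch (mirroring the new test case added below) of what the fix enables: parse_multipart() can be driven without a CONTENT-LENGTH entry in pdict, as long as the boundary is supplied.

    import cgi
    from io import BytesIO

    POSTDATA = (
        "--JfISa01\n"
        'Content-Disposition: form-data; name="submit-name"\n'
        "\n"
        "just a string\n"
        "\n"
        "--JfISa01--\n"
    )
    fp = BytesIO(POSTDATA.encode("latin-1"))
    pdict = {"boundary": b"JfISa01"}   # note: no 'CONTENT-LENGTH' key
    print(cgi.parse_multipart(fp, pdict))
    # {'submit-name': ['just a string\n']}  -- the expectation used by the new test
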
files: A Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst M Lib/cgi.py M Lib/test/test_cgi.py diff --git a/Lib/cgi.py b/Lib/cgi.py index c22c71b387851..77ab703cc0360 100755 --- a/Lib/cgi.py +++ b/Lib/cgi.py @@ -200,7 +200,10 @@ def parse_multipart(fp, pdict, encoding="utf-8", errors="replace"): ctype = "multipart/form-data; boundary={}".format(boundary) headers = Message() headers.set_type(ctype) - headers['Content-Length'] = pdict['CONTENT-LENGTH'] + try: + headers['Content-Length'] = pdict['CONTENT-LENGTH'] + except KeyError: + pass fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors, environ={'REQUEST_METHOD': 'POST'}) return {k: fs.getlist(k) for k in fs} @@ -736,7 +739,8 @@ def read_lines_to_outerboundary(self): last_line_lfend = True _read = 0 while 1: - if self.limit is not None and _read >= self.limit: + + if self.limit is not None and 0 <= self.limit <= _read: break line = self.fp.readline(1<<16) # bytes self.bytes_read += len(line) diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py index ab8677199f32e..101942de947fb 100644 --- a/Lib/test/test_cgi.py +++ b/Lib/test/test_cgi.py @@ -128,6 +128,20 @@ def test_parse_multipart(self): 'file': [b'Testing 123.\n'], 'title': ['']} self.assertEqual(result, expected) + def test_parse_multipart_without_content_length(self): + POSTDATA = '''--JfISa01 +Content-Disposition: form-data; name="submit-name" + +just a string + +--JfISa01-- +''' + fp = BytesIO(POSTDATA.encode('latin1')) + env = {'boundary': 'JfISa01'.encode('latin1')} + result = cgi.parse_multipart(fp, env) + expected = {'submit-name': ['just a string\n']} + self.assertEqual(result, expected) + def test_parse_multipart_invalid_encoding(self): BOUNDARY = "JfISa01" POSTDATA = """--JfISa01 diff --git a/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst new file mode 100644 index 0000000000000..2656b4bf22ae4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst @@ -0,0 +1 @@ +Fix `cgi.parse_multipart` without content_length. Patch by Roger Duran From webhook-mailer at python.org Mon Jun 15 11:33:41 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Jun 2020 15:33:41 -0000 Subject: [Python-checkins] [3.7] bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) (GH-20892) Message-ID: https://github.com/python/cpython/commit/aa83935a56d1fd4d72d4de5f0278a240a2d6844d commit: aa83935a56d1fd4d72d4de5f0278a240a2d6844d branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-15T08:33:32-07:00 summary: [3.7] bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) (GH-20892) In Python 3.7 the behavior of parse_multipart changed requiring CONTENT-LENGTH header, this fix remove this header as required and fix FieldStorage read_lines_to_outerboundary, by not using limit when it's negative, since by default it's -1 if not content-length and keeps substracting what was read from the file object. Also added a test case for this problem. 
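
For the read_lines_to_outerboundary() half of the fix, a tiny illustration with assumed values (not part of the patch) of why the old guard stopped reading at once when self.limit kept its default of -1:

    limit, read_so_far = -1, 0
    print(read_so_far >= limit)        # True  -> old check: loop exits before any body is read
    print(0 <= limit <= read_so_far)   # False -> new check: keep reading until the boundary
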
(cherry picked from commit d8cf3514dd4682419a66f6e834bb384ee34afc95) Co-authored-by: roger Automerge-Triggered-By: @ned-deily files: A Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst M Lib/cgi.py M Lib/test/test_cgi.py diff --git a/Lib/cgi.py b/Lib/cgi.py index df84f1fe69cfb..5a001667efca8 100755 --- a/Lib/cgi.py +++ b/Lib/cgi.py @@ -217,7 +217,10 @@ def parse_multipart(fp, pdict, encoding="utf-8", errors="replace"): ctype = "multipart/form-data; boundary={}".format(boundary) headers = Message() headers.set_type(ctype) - headers['Content-Length'] = pdict['CONTENT-LENGTH'] + try: + headers['Content-Length'] = pdict['CONTENT-LENGTH'] + except KeyError: + pass fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors, environ={'REQUEST_METHOD': 'POST'}) return {k: fs.getlist(k) for k in fs} @@ -753,7 +756,8 @@ def read_lines_to_outerboundary(self): last_line_lfend = True _read = 0 while 1: - if self.limit is not None and _read >= self.limit: + + if self.limit is not None and 0 <= self.limit <= _read: break line = self.fp.readline(1<<16) # bytes self.bytes_read += len(line) diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py index b46be67f77329..220268e14f032 100644 --- a/Lib/test/test_cgi.py +++ b/Lib/test/test_cgi.py @@ -130,6 +130,20 @@ def test_parse_multipart(self): 'file': [b'Testing 123.\n'], 'title': ['']} self.assertEqual(result, expected) + def test_parse_multipart_without_content_length(self): + POSTDATA = '''--JfISa01 +Content-Disposition: form-data; name="submit-name" + +just a string + +--JfISa01-- +''' + fp = BytesIO(POSTDATA.encode('latin1')) + env = {'boundary': 'JfISa01'.encode('latin1')} + result = cgi.parse_multipart(fp, env) + expected = {'submit-name': ['just a string\n']} + self.assertEqual(result, expected) + def test_parse_multipart_invalid_encoding(self): BOUNDARY = "JfISa01" POSTDATA = """--JfISa01 diff --git a/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst new file mode 100644 index 0000000000000..2656b4bf22ae4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst @@ -0,0 +1 @@ +Fix `cgi.parse_multipart` without content_length. Patch by Roger Duran From webhook-mailer at python.org Mon Jun 15 11:51:40 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Jun 2020 15:51:40 -0000 Subject: [Python-checkins] bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) Message-ID: https://github.com/python/cpython/commit/c72b7f703eec45c1f4006bf5f65092daedaec46e commit: c72b7f703eec45c1f4006bf5f65092daedaec46e branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-15T08:51:35-07:00 summary: bpo-34226: fix cgi.parse_multipart without content_length (GH-8530) In Python 3.7 the behavior of parse_multipart changed requiring CONTENT-LENGTH header, this fix remove this header as required and fix FieldStorage read_lines_to_outerboundary, by not using limit when it's negative, since by default it's -1 if not content-length and keeps substracting what was read from the file object. Also added a test case for this problem. 
(cherry picked from commit d8cf3514dd4682419a66f6e834bb384ee34afc95) Co-authored-by: roger files: A Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst M Lib/cgi.py M Lib/test/test_cgi.py diff --git a/Lib/cgi.py b/Lib/cgi.py index c22c71b387851..77ab703cc0360 100755 --- a/Lib/cgi.py +++ b/Lib/cgi.py @@ -200,7 +200,10 @@ def parse_multipart(fp, pdict, encoding="utf-8", errors="replace"): ctype = "multipart/form-data; boundary={}".format(boundary) headers = Message() headers.set_type(ctype) - headers['Content-Length'] = pdict['CONTENT-LENGTH'] + try: + headers['Content-Length'] = pdict['CONTENT-LENGTH'] + except KeyError: + pass fs = FieldStorage(fp, headers=headers, encoding=encoding, errors=errors, environ={'REQUEST_METHOD': 'POST'}) return {k: fs.getlist(k) for k in fs} @@ -736,7 +739,8 @@ def read_lines_to_outerboundary(self): last_line_lfend = True _read = 0 while 1: - if self.limit is not None and _read >= self.limit: + + if self.limit is not None and 0 <= self.limit <= _read: break line = self.fp.readline(1<<16) # bytes self.bytes_read += len(line) diff --git a/Lib/test/test_cgi.py b/Lib/test/test_cgi.py index ab8677199f32e..101942de947fb 100644 --- a/Lib/test/test_cgi.py +++ b/Lib/test/test_cgi.py @@ -128,6 +128,20 @@ def test_parse_multipart(self): 'file': [b'Testing 123.\n'], 'title': ['']} self.assertEqual(result, expected) + def test_parse_multipart_without_content_length(self): + POSTDATA = '''--JfISa01 +Content-Disposition: form-data; name="submit-name" + +just a string + +--JfISa01-- +''' + fp = BytesIO(POSTDATA.encode('latin1')) + env = {'boundary': 'JfISa01'.encode('latin1')} + result = cgi.parse_multipart(fp, env) + expected = {'submit-name': ['just a string\n']} + self.assertEqual(result, expected) + def test_parse_multipart_invalid_encoding(self): BOUNDARY = "JfISa01" POSTDATA = """--JfISa01 diff --git a/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst new file mode 100644 index 0000000000000..2656b4bf22ae4 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst @@ -0,0 +1 @@ +Fix `cgi.parse_multipart` without content_length. Patch by Roger Duran From webhook-mailer at python.org Mon Jun 15 12:21:04 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 15 Jun 2020 16:21:04 -0000 Subject: [Python-checkins] bpo-1635741: Port _dbm module to multiphase initialization (GH-20848) Message-ID: https://github.com/python/cpython/commit/bf69a8f99f1b0e19a59509c6c4d7015a31d881a1 commit: bf69a8f99f1b0e19a59509c6c4d7015a31d881a1 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-16T01:20:54+09:00 summary: bpo-1635741: Port _dbm module to multiphase initialization (GH-20848) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-12-22-56-17.bpo-1635741.mmlp3Q.rst M Modules/_dbmmodule.c M Modules/clinic/_dbmmodule.c.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-22-56-17.bpo-1635741.mmlp3Q.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-22-56-17.bpo-1635741.mmlp3Q.rst new file mode 100644 index 0000000000000..ae12d25baa3ad --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-22-56-17.bpo-1635741.mmlp3Q.rst @@ -0,0 +1 @@ +Port :mod:`_dbm` to multiphase initialization. 
diff --git a/Modules/_dbmmodule.c b/Modules/_dbmmodule.c index 80a0503622c3f..97772a04d08fe 100644 --- a/Modules/_dbmmodule.c +++ b/Modules/_dbmmodule.c @@ -28,6 +28,19 @@ static const char which_dbm[] = "Berkeley DB"; #error "No ndbm.h available!" #endif +typedef struct { + PyTypeObject *dbm_type; + PyObject *dbm_error; +} _dbm_state; + +static inline _dbm_state* +get_dbm_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (_dbm_state *)state; +} + /*[clinic input] module _dbm class _dbm.dbm "dbmobject *" "&Dbmtype" @@ -43,28 +56,25 @@ typedef struct { #include "clinic/_dbmmodule.c.h" -static PyTypeObject Dbmtype; - -#define is_dbmobject(v) Py_IS_TYPE(v, &Dbmtype) -#define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \ - { PyErr_SetString(DbmError, "DBM object has already been closed"); \ - return NULL; } - -static PyObject *DbmError; +#define check_dbmobject_open(v, err) \ + if ((v)->di_dbm == NULL) { \ + PyErr_SetString(err, "DBM object has already been closed"); \ + return NULL; \ + } static PyObject * -newdbmobject(const char *file, int flags, int mode) +newdbmobject(_dbm_state *state, const char *file, int flags, int mode) { dbmobject *dp; - dp = PyObject_New(dbmobject, &Dbmtype); + dp = PyObject_New(dbmobject, state->dbm_type); if (dp == NULL) return NULL; dp->di_size = -1; dp->flags = flags; /* See issue #19296 */ if ( (dp->di_dbm = dbm_open((char *)file, flags, mode)) == 0 ) { - PyErr_SetFromErrnoWithFilename(DbmError, file); + PyErr_SetFromErrnoWithFilename(state->dbm_error, file); Py_DECREF(dp); return NULL; } @@ -76,16 +86,21 @@ newdbmobject(const char *file, int flags, int mode) static void dbm_dealloc(dbmobject *dp) { - if ( dp->di_dbm ) + if (dp->di_dbm) { dbm_close(dp->di_dbm); - PyObject_Del(dp); + } + PyTypeObject *tp = Py_TYPE(dp); + tp->tp_free(dp); + Py_DECREF(tp); } static Py_ssize_t dbm_length(dbmobject *dp) { + _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + assert(state != NULL); if (dp->di_dbm == NULL) { - PyErr_SetString(DbmError, "DBM object has already been closed"); + PyErr_SetString(state->dbm_error, "DBM object has already been closed"); return -1; } if ( dp->di_size < 0 ) { @@ -106,12 +121,14 @@ dbm_subscript(dbmobject *dp, PyObject *key) { datum drec, krec; Py_ssize_t tmp_size; - - if (!PyArg_Parse(key, "s#", &krec.dptr, &tmp_size) ) + _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + assert(state != NULL); + if (!PyArg_Parse(key, "s#", &krec.dptr, &tmp_size)) { return NULL; + } krec.dsize = tmp_size; - check_dbmobject_open(dp); + check_dbmobject_open(dp, state->dbm_error); drec = dbm_fetch(dp->di_dbm, krec); if ( drec.dptr == 0 ) { PyErr_SetObject(PyExc_KeyError, key); @@ -119,7 +136,7 @@ dbm_subscript(dbmobject *dp, PyObject *key) } if ( dbm_error(dp->di_dbm) ) { dbm_clearerr(dp->di_dbm); - PyErr_SetString(DbmError, ""); + PyErr_SetString(state->dbm_error, ""); return NULL; } return PyBytes_FromStringAndSize(drec.dptr, drec.dsize); @@ -136,9 +153,11 @@ dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) "dbm mappings have bytes or string keys only"); return -1; } + _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + assert(state != NULL); krec.dsize = tmp_size; if (dp->di_dbm == NULL) { - PyErr_SetString(DbmError, "DBM object has already been closed"); + PyErr_SetString(state->dbm_error, "DBM object has already been closed"); return -1; } dp->di_size = -1; @@ -151,7 +170,7 @@ dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) PyErr_SetObject(PyExc_KeyError, v); } else { - 
PyErr_SetString(DbmError, "cannot delete item from database"); + PyErr_SetString(state->dbm_error, "cannot delete item from database"); } return -1; } @@ -164,25 +183,19 @@ dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) drec.dsize = tmp_size; if ( dbm_store(dp->di_dbm, krec, drec, DBM_REPLACE) < 0 ) { dbm_clearerr(dp->di_dbm); - PyErr_SetString(DbmError, + PyErr_SetString(state->dbm_error, "cannot add item to database"); return -1; } } if ( dbm_error(dp->di_dbm) ) { dbm_clearerr(dp->di_dbm); - PyErr_SetString(DbmError, ""); + PyErr_SetString(state->dbm_error, ""); return -1; } return 0; } -static PyMappingMethods dbm_as_mapping = { - (lenfunc)dbm_length, /*mp_length*/ - (binaryfunc)dbm_subscript, /*mp_subscript*/ - (objobjargproc)dbm_ass_sub, /*mp_ass_subscript*/ -}; - /*[clinic input] _dbm.dbm.close @@ -193,8 +206,9 @@ static PyObject * _dbm_dbm_close_impl(dbmobject *self) /*[clinic end generated code: output=c8dc5b6709600b86 input=046db72377d51be8]*/ { - if (self->di_dbm) + if (self->di_dbm) { dbm_close(self->di_dbm); + } self->di_dbm = NULL; Py_RETURN_NONE; } @@ -202,21 +216,26 @@ _dbm_dbm_close_impl(dbmobject *self) /*[clinic input] _dbm.dbm.keys + cls: defining_class + Return a list of all keys in the database. [clinic start generated code]*/ static PyObject * -_dbm_dbm_keys_impl(dbmobject *self) -/*[clinic end generated code: output=434549f7c121b33c input=d210ba778cd9c68a]*/ +_dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=f2a593b3038e5996 input=d3706a28fc051097]*/ { PyObject *v, *item; datum key; int err; - check_dbmobject_open(self); + _dbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_dbmobject_open(self, state->dbm_error); v = PyList_New(0); - if (v == NULL) + if (v == NULL) { return NULL; + } for (key = dbm_firstkey(self->di_dbm); key.dptr; key = dbm_nextkey(self->di_dbm)) { item = PyBytes_FromStringAndSize(key.dptr, key.dsize); @@ -241,8 +260,10 @@ dbm_contains(PyObject *self, PyObject *arg) datum key, val; Py_ssize_t size; + _dbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); + assert(state != NULL); if ((dp)->di_dbm == NULL) { - PyErr_SetString(DbmError, + PyErr_SetString(state->dbm_error, "DBM object has already been closed"); return -1; } @@ -266,22 +287,9 @@ dbm_contains(PyObject *self, PyObject *arg) return val.dptr != NULL; } -static PySequenceMethods dbm_as_sequence = { - 0, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - 0, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - dbm_contains, /* sq_contains */ - 0, /* sq_inplace_concat */ - 0, /* sq_inplace_repeat */ -}; - /*[clinic input] _dbm.dbm.get - + cls: defining_class key: str(accept={str, robuffer}, zeroes=True) default: object = None / @@ -290,19 +298,20 @@ Return the value for key if present, otherwise default. 
[clinic start generated code]*/ static PyObject * -_dbm_dbm_get_impl(dbmobject *self, const char *key, +_dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length, PyObject *default_value) -/*[clinic end generated code: output=b44f95eba8203d93 input=b788eba0ffad2e91]*/ -/*[clinic end generated code: output=4f5c0e523eaf1251 input=9402c0af8582dc69]*/ +/*[clinic end generated code: output=34851b5dc1c664dc input=66b993b8349fa8c1]*/ { datum dbm_key, val; - + _dbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); dbm_key.dptr = (char *)key; dbm_key.dsize = key_length; - check_dbmobject_open(self); + check_dbmobject_open(self, state->dbm_error); val = dbm_fetch(self->di_dbm, dbm_key); - if (val.dptr != NULL) + if (val.dptr != NULL) { return PyBytes_FromStringAndSize(val.dptr, val.dsize); + } Py_INCREF(default_value); return default_value; @@ -310,6 +319,7 @@ _dbm_dbm_get_impl(dbmobject *self, const char *key, /*[clinic input] _dbm.dbm.setdefault + cls: defining_class key: str(accept={str, robuffer}, zeroes=True) default: object(c_default="NULL") = b'' / @@ -320,24 +330,27 @@ If key is not in the database, it is inserted with default as the value. [clinic start generated code]*/ static PyObject * -_dbm_dbm_setdefault_impl(dbmobject *self, const char *key, +_dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length, PyObject *default_value) -/*[clinic end generated code: output=52545886cf272161 input=bf40c48edaca01d6]*/ +/*[clinic end generated code: output=d5c68fe673886767 input=126a3ff15c5f8232]*/ { datum dbm_key, val; Py_ssize_t tmp_size; - + _dbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); dbm_key.dptr = (char *)key; dbm_key.dsize = key_length; - check_dbmobject_open(self); + check_dbmobject_open(self, state->dbm_error); val = dbm_fetch(self->di_dbm, dbm_key); - if (val.dptr != NULL) + if (val.dptr != NULL) { return PyBytes_FromStringAndSize(val.dptr, val.dsize); + } if (default_value == NULL) { default_value = PyBytes_FromStringAndSize(NULL, 0); - if (default_value == NULL) + if (default_value == NULL) { return NULL; + } val.dptr = NULL; val.dsize = 0; } @@ -352,7 +365,7 @@ _dbm_dbm_setdefault_impl(dbmobject *self, const char *key, } if (dbm_store(self->di_dbm, dbm_key, val, DBM_INSERT) < 0) { dbm_clearerr(self->di_dbm); - PyErr_SetString(DbmError, "cannot add item to database"); + PyErr_SetString(state->dbm_error, "cannot add item to database"); Py_DECREF(default_value); return NULL; } @@ -373,7 +386,6 @@ dbm__exit__(PyObject *self, PyObject *args) return _PyObject_CallMethodIdNoArgs(self, &PyId_close); } - static PyMethodDef dbm_methods[] = { _DBM_DBM_CLOSE_METHODDEF _DBM_DBM_KEYS_METHODDEF @@ -381,38 +393,29 @@ static PyMethodDef dbm_methods[] = { _DBM_DBM_SETDEFAULT_METHODDEF {"__enter__", dbm__enter__, METH_NOARGS, NULL}, {"__exit__", dbm__exit__, METH_VARARGS, NULL}, - {NULL, NULL} /* sentinel */ + {NULL, NULL} /* sentinel */ }; -static PyTypeObject Dbmtype = { - PyVarObject_HEAD_INIT(NULL, 0) - "_dbm.dbm", - sizeof(dbmobject), - 0, - (destructor)dbm_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - &dbm_as_sequence, /*tp_as_sequence*/ - &dbm_as_mapping, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT, /*tp_flags*/ - 0, /*tp_doc*/ - 0, /*tp_traverse*/ - 
0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - dbm_methods, /*tp_methods*/ +static PyType_Slot dbmtype_spec_slots[] = { + {Py_tp_dealloc, dbm_dealloc}, + {Py_tp_methods, dbm_methods}, + {Py_sq_contains, dbm_contains}, + {Py_mp_length, dbm_length}, + {Py_mp_subscript, dbm_subscript}, + {Py_mp_ass_subscript, dbm_ass_sub}, + {0, 0} +}; + + +static PyType_Spec dbmtype_spec = { + .name = "_dbm.dbm", + .basicsize = sizeof(dbmobject), + // Calling PyType_GetModuleState() on a subclass is not safe. + // dbmtype_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = dbmtype_spec_slots, }; /* ----------------------------------------------------------------- */ @@ -443,19 +446,26 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, /*[clinic end generated code: output=9527750f5df90764 input=376a9d903a50df59]*/ { int iflags; - - if ( strcmp(flags, "r") == 0 ) + _dbm_state *state = get_dbm_state(module); + assert(state != NULL); + if (strcmp(flags, "r") == 0) { iflags = O_RDONLY; - else if ( strcmp(flags, "w") == 0 ) + } + else if (strcmp(flags, "w") == 0) { iflags = O_RDWR; - else if ( strcmp(flags, "rw") == 0 ) /* B/W compat */ + } + else if (strcmp(flags, "rw") == 0) { + /* Backward compatibility */ iflags = O_RDWR|O_CREAT; - else if ( strcmp(flags, "c") == 0 ) + } + else if (strcmp(flags, "c") == 0) { iflags = O_RDWR|O_CREAT; - else if ( strcmp(flags, "n") == 0 ) + } + else if (strcmp(flags, "n") == 0) { iflags = O_RDWR|O_CREAT|O_TRUNC; + } else { - PyErr_SetString(DbmError, + PyErr_SetString(state->dbm_error, "arg 2 to open should be 'r', 'w', 'c', or 'n'"); return NULL; } @@ -470,7 +480,7 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, PyErr_SetString(PyExc_ValueError, "embedded null character"); return NULL; } - PyObject *self = newdbmobject(name, iflags, mode); + PyObject *self = newdbmobject(state, name, iflags, mode); Py_DECREF(filenamebytes); return self; } @@ -480,42 +490,70 @@ static PyMethodDef dbmmodule_methods[] = { { 0, 0 }, }; +static int +_dbm_exec(PyObject *module) +{ + _dbm_state *state = get_dbm_state(module); + state->dbm_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &dbmtype_spec, NULL); + if (state->dbm_type == NULL) { + return -1; + } + state->dbm_error = PyErr_NewException("_dbm.error", PyExc_OSError, NULL); + if (state->dbm_error == NULL) { + return -1; + } + if (PyModule_AddStringConstant(module, "library", which_dbm) < 0) { + return -1; + } + if (PyModule_AddType(module, (PyTypeObject *)state->dbm_error) < 0) { + return -1; + } + return 0; +} + +static int +_dbm_module_traverse(PyObject *module, visitproc visit, void *arg) +{ + _dbm_state *state = get_dbm_state(module); + Py_VISIT(state->dbm_error); + Py_VISIT(state->dbm_type); + return 0; +} + +static int +_dbm_module_clear(PyObject *module) +{ + _dbm_state *state = get_dbm_state(module); + Py_CLEAR(state->dbm_error); + Py_CLEAR(state->dbm_type); + return 0; +} + +static void +_dbm_module_free(void *module) +{ + _dbm_module_clear((PyObject *)module); +} + +static PyModuleDef_Slot _dbmmodule_slots[] = { + {Py_mod_exec, _dbm_exec}, + {0, NULL} +}; static struct PyModuleDef _dbmmodule = { PyModuleDef_HEAD_INIT, - "_dbm", - NULL, - -1, - dbmmodule_methods, - NULL, - NULL, - NULL, - NULL + .m_name = "_dbm", + .m_size = sizeof(_dbm_state), + .m_methods = dbmmodule_methods, 
+ .m_slots = _dbmmodule_slots, + .m_traverse = _dbm_module_traverse, + .m_clear = _dbm_module_clear, + .m_free = _dbm_module_free, }; PyMODINIT_FUNC -PyInit__dbm(void) { - PyObject *m, *d, *s; - - if (PyType_Ready(&Dbmtype) < 0) - return NULL; - m = PyModule_Create(&_dbmmodule); - if (m == NULL) - return NULL; - d = PyModule_GetDict(m); - if (DbmError == NULL) - DbmError = PyErr_NewException("_dbm.error", - PyExc_OSError, NULL); - s = PyUnicode_FromString(which_dbm); - if (s != NULL) { - PyDict_SetItemString(d, "library", s); - Py_DECREF(s); - } - if (DbmError != NULL) - PyDict_SetItemString(d, "error", DbmError); - if (PyErr_Occurred()) { - Py_DECREF(m); - m = NULL; - } - return m; +PyInit__dbm(void) +{ + return PyModuleDef_Init(&_dbmmodule); } diff --git a/Modules/clinic/_dbmmodule.c.h b/Modules/clinic/_dbmmodule.c.h index edf29be92af9b..af288c2586a10 100644 --- a/Modules/clinic/_dbmmodule.c.h +++ b/Modules/clinic/_dbmmodule.c.h @@ -27,15 +27,26 @@ PyDoc_STRVAR(_dbm_dbm_keys__doc__, "Return a list of all keys in the database."); #define _DBM_DBM_KEYS_METHODDEF \ - {"keys", (PyCFunction)_dbm_dbm_keys, METH_NOARGS, _dbm_dbm_keys__doc__}, + {"keys", (PyCFunction)(void(*)(void))_dbm_dbm_keys, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_keys__doc__}, static PyObject * -_dbm_dbm_keys_impl(dbmobject *self); +_dbm_dbm_keys_impl(dbmobject *self, PyTypeObject *cls); static PyObject * -_dbm_dbm_keys(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_dbm_dbm_keys(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _dbm_dbm_keys_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":keys", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _dbm_dbm_keys_impl(self, cls); + +exit: + return return_value; } PyDoc_STRVAR(_dbm_dbm_get__doc__, @@ -45,25 +56,27 @@ PyDoc_STRVAR(_dbm_dbm_get__doc__, "Return the value for key if present, otherwise default."); #define _DBM_DBM_GET_METHODDEF \ - {"get", (PyCFunction)(void(*)(void))_dbm_dbm_get, METH_FASTCALL, _dbm_dbm_get__doc__}, + {"get", (PyCFunction)(void(*)(void))_dbm_dbm_get, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_get__doc__}, static PyObject * -_dbm_dbm_get_impl(dbmobject *self, const char *key, +_dbm_dbm_get_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_get(dbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_dbm_dbm_get(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = {"", "", NULL}; + static _PyArg_Parser _parser = {"s#|O:get", _keywords, 0}; const char *key; Py_ssize_clean_t key_length; PyObject *default_value = Py_None; - if (!_PyArg_ParseStack(args, nargs, "s#|O:get", + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_get_impl(self, key, key_length, default_value); + return_value = _dbm_dbm_get_impl(self, cls, key, key_length, default_value); exit: return return_value; @@ -78,26 +91,28 @@ PyDoc_STRVAR(_dbm_dbm_setdefault__doc__, "If key is not in the database, it is inserted with default as the value."); #define _DBM_DBM_SETDEFAULT_METHODDEF \ - {"setdefault", (PyCFunction)(void(*)(void))_dbm_dbm_setdefault, 
METH_FASTCALL, _dbm_dbm_setdefault__doc__}, + {"setdefault", (PyCFunction)(void(*)(void))_dbm_dbm_setdefault, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _dbm_dbm_setdefault__doc__}, static PyObject * -_dbm_dbm_setdefault_impl(dbmobject *self, const char *key, +_dbm_dbm_setdefault_impl(dbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length, PyObject *default_value); static PyObject * -_dbm_dbm_setdefault(dbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_dbm_dbm_setdefault(dbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = {"", "", NULL}; + static _PyArg_Parser _parser = {"s#|O:setdefault", _keywords, 0}; const char *key; Py_ssize_clean_t key_length; PyObject *default_value = NULL; - if (!_PyArg_ParseStack(args, nargs, "s#|O:setdefault", + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, &key, &key_length, &default_value)) { goto exit; } - return_value = _dbm_dbm_setdefault_impl(self, key, key_length, default_value); + return_value = _dbm_dbm_setdefault_impl(self, cls, key, key_length, default_value); exit: return return_value; @@ -172,4 +187,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=ba4ff07b8c8bbfe4 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=6947b1115df66f7c input=a9049054013a1b77]*/ From webhook-mailer at python.org Mon Jun 15 13:28:57 2020 From: webhook-mailer at python.org (Krzysztof Konopko) Date: Mon, 15 Jun 2020 17:28:57 -0000 Subject: [Python-checkins] bpo-40448: ensurepip: Do not use cache (GH-19812) Message-ID: https://github.com/python/cpython/commit/4a3a682b12f93a03888e8b59f439bc5fe30d6055 commit: 4a3a682b12f93a03888e8b59f439bc5fe30d6055 branch: master author: Krzysztof Konopko committer: GitHub date: 2020-06-15T13:28:46-04:00 summary: bpo-40448: ensurepip: Do not use cache (GH-19812) ensurepip optionally installs or upgrades 'pip' and 'setuptools' using the version of those modules bundled with Python. The internal PIP installation routine by default temporarily uses its cache, if it exists. This is undesirable as Python builds and installations may be independent of the user running the build, whilst PIP cache location is dependent on the user's environment and outside of the build environment. At the same time, there's no value in using the cache while installing bundled modules. This change disables PIP caching when used in ensurepip. 
files: A Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst M Lib/ensurepip/__init__.py M Lib/test/test_ensurepip.py M Misc/ACKS diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 545fce656fd6f..d62b1187f90d6 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -119,7 +119,7 @@ def _bootstrap(*, root=None, upgrade=False, user=False, additional_paths.append(os.path.join(tmpdir, wheel_name)) # Construct the arguments to be passed to the pip command - args = ["install", "--no-index", "--find-links", tmpdir] + args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir] if root: args += ["--root", root] if upgrade: diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index 8996689309285..4786d28f39a3d 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -40,7 +40,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, @@ -54,7 +54,7 @@ def test_bootstrapping_with_root(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--root", "/foo/bar/", "setuptools", "pip", ], @@ -66,7 +66,7 @@ def test_bootstrapping_with_user(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--user", "setuptools", "pip", ], unittest.mock.ANY, @@ -77,7 +77,7 @@ def test_bootstrapping_with_upgrade(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--upgrade", "setuptools", "pip", ], unittest.mock.ANY, @@ -88,7 +88,7 @@ def test_bootstrapping_with_verbosity_1(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-v", "setuptools", "pip", ], unittest.mock.ANY, @@ -99,7 +99,7 @@ def test_bootstrapping_with_verbosity_2(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vv", "setuptools", "pip", ], unittest.mock.ANY, @@ -110,7 +110,7 @@ def test_bootstrapping_with_verbosity_3(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vvv", "setuptools", "pip", ], unittest.mock.ANY, @@ -260,7 +260,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, diff --git a/Misc/ACKS b/Misc/ACKS index a505a3d784036..0fc1954a22237 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -910,6 +910,7 @@ Vajrasky Kok Guido Kollerie Jacek Ko?odziej Jacek Konieczny +Krzysztof Konopko Arkady Koplyarov Peter A. Koren ???? ????????? 
diff --git a/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst new file mode 100644 index 0000000000000..a755c5faa671c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst @@ -0,0 +1,2 @@ +:mod:`ensurepip` now disables the use of `pip` cache when installing the +bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. From webhook-mailer at python.org Mon Jun 15 13:44:41 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Jun 2020 17:44:41 -0000 Subject: [Python-checkins] bpo-40448: ensurepip: Do not use cache (GH-19812) Message-ID: https://github.com/python/cpython/commit/b46beb25e4cf213dbf46a0a0cf3f0ed134894f7d commit: b46beb25e4cf213dbf46a0a0cf3f0ed134894f7d branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-15T10:44:34-07:00 summary: bpo-40448: ensurepip: Do not use cache (GH-19812) ensurepip optionally installs or upgrades 'pip' and 'setuptools' using the version of those modules bundled with Python. The internal PIP installation routine by default temporarily uses its cache, if it exists. This is undesirable as Python builds and installations may be independent of the user running the build, whilst PIP cache location is dependent on the user's environment and outside of the build environment. At the same time, there's no value in using the cache while installing bundled modules. This change disables PIP caching when used in ensurepip. (cherry picked from commit 4a3a682b12f93a03888e8b59f439bc5fe30d6055) Co-authored-by: Krzysztof Konopko files: A Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst M Lib/ensurepip/__init__.py M Lib/test/test_ensurepip.py M Misc/ACKS diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 948f34092435a..7c5baf0b5f1a7 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -114,7 +114,7 @@ def _bootstrap(*, root=None, upgrade=False, user=False, additional_paths.append(os.path.join(tmpdir, wheel_name)) # Construct the arguments to be passed to the pip command - args = ["install", "--no-index", "--find-links", tmpdir] + args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir] if root: args += ["--root", root] if upgrade: diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index 8996689309285..4786d28f39a3d 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -40,7 +40,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, @@ -54,7 +54,7 @@ def test_bootstrapping_with_root(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--root", "/foo/bar/", "setuptools", "pip", ], @@ -66,7 +66,7 @@ def test_bootstrapping_with_user(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--user", "setuptools", "pip", ], unittest.mock.ANY, @@ -77,7 +77,7 @@ def test_bootstrapping_with_upgrade(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", 
"--no-index", "--find-links", unittest.mock.ANY, "--upgrade", "setuptools", "pip", ], unittest.mock.ANY, @@ -88,7 +88,7 @@ def test_bootstrapping_with_verbosity_1(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-v", "setuptools", "pip", ], unittest.mock.ANY, @@ -99,7 +99,7 @@ def test_bootstrapping_with_verbosity_2(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vv", "setuptools", "pip", ], unittest.mock.ANY, @@ -110,7 +110,7 @@ def test_bootstrapping_with_verbosity_3(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vvv", "setuptools", "pip", ], unittest.mock.ANY, @@ -260,7 +260,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, diff --git a/Misc/ACKS b/Misc/ACKS index a6da4f1289828..a893fdce0e9ac 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -860,6 +860,7 @@ Vajrasky Kok Guido Kollerie Jacek Ko?odziej Jacek Konieczny +Krzysztof Konopko Arkady Koplyarov Peter A. Koren ???? ????????? diff --git a/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst new file mode 100644 index 0000000000000..a755c5faa671c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst @@ -0,0 +1,2 @@ +:mod:`ensurepip` now disables the use of `pip` cache when installing the +bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. From webhook-mailer at python.org Mon Jun 15 13:45:26 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Jun 2020 17:45:26 -0000 Subject: [Python-checkins] bpo-40448: ensurepip: Do not use cache (GH-19812) Message-ID: https://github.com/python/cpython/commit/a1d3be4623c8ec7069bd34ccdce336be9cdeb644 commit: a1d3be4623c8ec7069bd34ccdce336be9cdeb644 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-15T10:45:21-07:00 summary: bpo-40448: ensurepip: Do not use cache (GH-19812) ensurepip optionally installs or upgrades 'pip' and 'setuptools' using the version of those modules bundled with Python. The internal PIP installation routine by default temporarily uses its cache, if it exists. This is undesirable as Python builds and installations may be independent of the user running the build, whilst PIP cache location is dependent on the user's environment and outside of the build environment. At the same time, there's no value in using the cache while installing bundled modules. This change disables PIP caching when used in ensurepip. 
(cherry picked from commit 4a3a682b12f93a03888e8b59f439bc5fe30d6055) Co-authored-by: Krzysztof Konopko files: A Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst M Lib/ensurepip/__init__.py M Lib/test/test_ensurepip.py M Misc/ACKS diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 566fb2a096bcf..ecfaee5640ea9 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -116,7 +116,7 @@ def _bootstrap(*, root=None, upgrade=False, user=False, additional_paths.append(os.path.join(tmpdir, wheel_name)) # Construct the arguments to be passed to the pip command - args = ["install", "--no-index", "--find-links", tmpdir] + args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir] if root: args += ["--root", root] if upgrade: diff --git a/Lib/test/test_ensurepip.py b/Lib/test/test_ensurepip.py index 8996689309285..4786d28f39a3d 100644 --- a/Lib/test/test_ensurepip.py +++ b/Lib/test/test_ensurepip.py @@ -40,7 +40,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, @@ -54,7 +54,7 @@ def test_bootstrapping_with_root(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--root", "/foo/bar/", "setuptools", "pip", ], @@ -66,7 +66,7 @@ def test_bootstrapping_with_user(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--user", "setuptools", "pip", ], unittest.mock.ANY, @@ -77,7 +77,7 @@ def test_bootstrapping_with_upgrade(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "--upgrade", "setuptools", "pip", ], unittest.mock.ANY, @@ -88,7 +88,7 @@ def test_bootstrapping_with_verbosity_1(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-v", "setuptools", "pip", ], unittest.mock.ANY, @@ -99,7 +99,7 @@ def test_bootstrapping_with_verbosity_2(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vv", "setuptools", "pip", ], unittest.mock.ANY, @@ -110,7 +110,7 @@ def test_bootstrapping_with_verbosity_3(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "-vvv", "setuptools", "pip", ], unittest.mock.ANY, @@ -260,7 +260,7 @@ def test_basic_bootstrapping(self): self.run_pip.assert_called_once_with( [ - "install", "--no-index", "--find-links", + "install", "--no-cache-dir", "--no-index", "--find-links", unittest.mock.ANY, "setuptools", "pip", ], unittest.mock.ANY, diff --git a/Misc/ACKS b/Misc/ACKS index e85e370c98cc7..8098637a32c5d 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -885,6 +885,7 @@ Vajrasky Kok Guido Kollerie Jacek Ko?odziej Jacek Konieczny +Krzysztof Konopko Arkady Koplyarov Peter A. Koren ???? ????????? 
diff --git a/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst new file mode 100644 index 0000000000000..a755c5faa671c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst @@ -0,0 +1,2 @@ +:mod:`ensurepip` now disables the use of `pip` cache when installing the +bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. From webhook-mailer at python.org Mon Jun 15 15:16:58 2020 From: webhook-mailer at python.org (Xavier Fernandez) Date: Mon, 15 Jun 2020 19:16:58 -0000 Subject: [Python-checkins] bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) Message-ID: https://github.com/python/cpython/commit/5f79f46612c351bde78a41c5264c42db21008868 commit: 5f79f46612c351bde78a41c5264c42db21008868 branch: master author: Xavier Fernandez committer: GitHub date: 2020-06-15T15:16:48-04:00 summary: bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) files: A Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl A Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl A Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst D Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl D Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl M Lib/ensurepip/__init__.py M Lib/test/test_venv.py diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index d62b1187f90d6..21320a83198ca 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -12,13 +12,13 @@ __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "41.2.0" +_SETUPTOOLS_VERSION = "47.1.0" -_PIP_VERSION = "19.2.3" +_PIP_VERSION = "20.1.1" _PROJECTS = [ - ("setuptools", _SETUPTOOLS_VERSION), - ("pip", _PIP_VERSION), + ("setuptools", _SETUPTOOLS_VERSION, "py3"), + ("pip", _PIP_VERSION, "py2.py3"), ] @@ -107,8 +107,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False, # Put our bundled wheels into a temporary directory and construct the # additional paths that need added to sys.path additional_paths = [] - for project, version in _PROJECTS: - wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) + for project, version, py_tag in _PROJECTS: + wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag) whl = resources.read_binary( _bundled, wheel_name, diff --git a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl deleted file mode 100644 index 8118df8ac1940..0000000000000 Binary files a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl new file mode 100644 index 0000000000000..ea1d0f7c8604a Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl similarity index 65% rename from Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl index 82df6f63f4ee9..f87867ff98254 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl differ diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 44c62193bf7cc..ef6d7bd5ad7da 100644 --- a/Lib/test/test_venv.py +++ 
b/Lib/test/test_venv.py @@ -513,7 +513,7 @@ def do_test_with_pip(self, system_site_packages): # executing pip with sudo, you may want sudo's -H flag." # where $HOME is replaced by the HOME environment variable. err = re.sub("^(WARNING: )?The directory .* or its parent directory " - "is not owned by the current user .*$", "", + "is not owned or is not writable by the current user.*$", "", err, flags=re.MULTILINE) self.assertEqual(err.rstrip(), "") # Being fairly specific regarding the expected behaviour for the diff --git a/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst new file mode 100644 index 0000000000000..c44da9fecb605 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst @@ -0,0 +1 @@ +Update ensurepip to install pip 20.1.1 and setuptools 47.1.0. From webhook-mailer at python.org Mon Jun 15 15:59:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Jun 2020 19:59:55 -0000 Subject: [Python-checkins] bpo-36020: Remove snprintf macro in pyerrors.h (GH-20889) Message-ID: https://github.com/python/cpython/commit/e822e37946f27c09953bb5733acf3b07c2db690f commit: e822e37946f27c09953bb5733acf3b07c2db690f branch: master author: Victor Stinner committer: GitHub date: 2020-06-15T21:59:47+02:00 summary: bpo-36020: Remove snprintf macro in pyerrors.h (GH-20889) On Windows, #include "pyerrors.h" no longer defines "snprintf" and "vsnprintf" macros. PyOS_snprintf() and PyOS_vsnprintf() should be used to get portable behavior. Replace snprintf() calls with PyOS_snprintf() and replace vsnprintf() calls with PyOS_vsnprintf(). files: A Misc/NEWS.d/next/C API/2020-06-15-16-46-01.bpo-36020.djI6jw.rst M Include/pyerrors.h M Modules/_ctypes/callbacks.c M Modules/socketmodule.c M Parser/tokenizer.c M Python/mysnprintf.c diff --git a/Include/pyerrors.h b/Include/pyerrors.h index 399bb7c3a6fac..979a26ba68a03 100644 --- a/Include/pyerrors.h +++ b/Include/pyerrors.h @@ -4,6 +4,8 @@ extern "C" { #endif +#include // va_list + /* Error handling definitions */ PyAPI_FUNC(void) PyErr_SetNone(PyObject *); @@ -307,21 +309,6 @@ PyAPI_FUNC(int) PyUnicodeTranslateError_SetReason( const char *reason /* UTF-8 encoded string */ ); -/* These APIs aren't really part of the error implementation, but - often needed to format error messages; the native C lib APIs are - not available on all platforms, which is why we provide emulations - for those platforms in Python/mysnprintf.c, - WARNING: The return value of snprintf varies across platforms; do - not rely on any particular behavior; eventually the C99 defn may - be reliable. -*/ -#if defined(MS_WIN32) && !defined(HAVE_SNPRINTF) -# define HAVE_SNPRINTF -# define snprintf _snprintf -# define vsnprintf _vsnprintf -#endif - -#include PyAPI_FUNC(int) PyOS_snprintf(char *str, size_t size, const char *format, ...) Py_GCC_ATTRIBUTE((format(printf, 3, 4))); PyAPI_FUNC(int) PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) diff --git a/Misc/NEWS.d/next/C API/2020-06-15-16-46-01.bpo-36020.djI6jw.rst b/Misc/NEWS.d/next/C API/2020-06-15-16-46-01.bpo-36020.djI6jw.rst new file mode 100644 index 0000000000000..1f91dce4608d3 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-15-16-46-01.bpo-36020.djI6jw.rst @@ -0,0 +1,2 @@ +On Windows, ``#include "pyerrors.h"`` no longer defines ``snprintf`` and +``vsnprintf`` macros. 
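A short aside on the commit message above: PyOS_snprintf() and PyOS_vsnprintf() take the same arguments as their C99 counterparts but always NUL-terminate the buffer and require str != NULL and size > 0 (see Python/mysnprintf.c). The sketch below is illustrative only and not part of the patch; the helper names format_error() and example() are invented here, though the va_list pattern mirrors the PrintError() change in Modules/_ctypes/callbacks.c further down.

    #include "Python.h"      /* declares PyOS_snprintf() and PyOS_vsnprintf() */
    #include <stdarg.h>

    /* Hypothetical varargs helper that forwards to the portable wrapper. */
    static void
    format_error(char *buf, size_t size, const char *fmt, ...)
    {
        va_list va;
        va_start(va, fmt);
        (void)PyOS_vsnprintf(buf, size, fmt, va);  /* buf is always NUL-terminated */
        va_end(va);
    }

    void
    example(void)
    {
        char buf[64];
        /* Portable replacement for a bare snprintf() call. */
        (void)PyOS_snprintf(buf, sizeof(buf), "invalid value: %d", 42);
        format_error(buf, sizeof(buf), "errno %d", 7);
    }
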
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c index 29e8fac8c9496..2abfa67cdc06b 100644 --- a/Modules/_ctypes/callbacks.c +++ b/Modules/_ctypes/callbacks.c @@ -84,7 +84,7 @@ PrintError(const char *msg, ...) va_list marker; va_start(marker, msg); - vsnprintf(buf, sizeof(buf), msg, marker); + PyOS_vsnprintf(buf, sizeof(buf), msg, marker); va_end(marker); if (f != NULL && f != Py_None) PyFile_WriteString(buf, f); diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c index f60a27ebe408c..db0eeaafeec27 100644 --- a/Modules/socketmodule.c +++ b/Modules/socketmodule.c @@ -436,13 +436,12 @@ remove_unusable_flags(PyObject *m) #endif #ifdef MS_WIN32 -#undef EAFNOSUPPORT -#define EAFNOSUPPORT WSAEAFNOSUPPORT -#define snprintf _snprintf +# undef EAFNOSUPPORT +# define EAFNOSUPPORT WSAEAFNOSUPPORT #endif #ifndef SOCKETCLOSE -#define SOCKETCLOSE close +# define SOCKETCLOSE close #endif #if (defined(HAVE_BLUETOOTH_H) || defined(HAVE_BLUETOOTH_BLUETOOTH_H)) && !defined(__NetBSD__) && !defined(__DragonFly__) diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c index d461e4e24e721..f3c1d9b20ade1 100644 --- a/Parser/tokenizer.c +++ b/Parser/tokenizer.c @@ -1133,7 +1133,7 @@ verify_identifier(struct tok_state *tok) Py_DECREF(s); // PyUnicode_FromFormatV() does not support %X char hex[9]; - snprintf(hex, sizeof(hex), "%04X", ch); + (void)PyOS_snprintf(hex, sizeof(hex), "%04X", ch); if (Py_UNICODE_ISPRINTABLE(ch)) { syntaxerror(tok, "invalid character '%c' (U+%s)", ch, hex); } diff --git a/Python/mysnprintf.c b/Python/mysnprintf.c index 945a81abb01c2..458ca14d5c611 100644 --- a/Python/mysnprintf.c +++ b/Python/mysnprintf.c @@ -1,6 +1,8 @@ #include "Python.h" -/* snprintf() wrappers. If the platform has vsnprintf, we use it, else we +/* snprintf() and vsnprintf() wrappers. + + If the platform has vsnprintf, we use it, else we emulate it in a half-hearted way. Even if the platform has it, we wrap it because platforms differ in what vsnprintf does in case the buffer is too small: C99 behavior is to return the number of characters that @@ -52,16 +54,17 @@ PyOS_snprintf(char *str, size_t size, const char *format, ...) int PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) { + assert(str != NULL); + assert(size > 0); + assert(format != NULL); + int len; /* # bytes written, excluding \0 */ -#ifdef HAVE_SNPRINTF -#define _PyOS_vsnprintf_EXTRA_SPACE 1 +#if defined(_MSC_VER) || defined(HAVE_SNPRINTF) +# define _PyOS_vsnprintf_EXTRA_SPACE 1 #else -#define _PyOS_vsnprintf_EXTRA_SPACE 512 +# define _PyOS_vsnprintf_EXTRA_SPACE 512 char *buffer; #endif - assert(str != NULL); - assert(size > 0); - assert(format != NULL); /* We take a size_t as input but return an int. Sanity check * our input so that it won't cause an overflow in the * vsnprintf return value or the buffer malloc size. */ @@ -70,10 +73,12 @@ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) goto Done; } -#ifdef HAVE_SNPRINTF +#if defined(_MSC_VER) + len = _vsnprintf(str, size, format, va); +#elif defined(HAVE_SNPRINTF) len = vsnprintf(str, size, format, va); #else - /* Emulate it. */ + /* Emulate vsnprintf(). 
*/ buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE); if (buffer == NULL) { len = -666; @@ -96,9 +101,11 @@ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) } PyMem_FREE(buffer); #endif + Done: - if (size > 0) + if (size > 0) { str[size-1] = '\0'; + } return len; #undef _PyOS_vsnprintf_EXTRA_SPACE } From webhook-mailer at python.org Mon Jun 15 17:42:31 2020 From: webhook-mailer at python.org (Ned Deily) Date: Mon, 15 Jun 2020 21:42:31 -0000 Subject: [Python-checkins] bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) (GH-20900) Message-ID: https://github.com/python/cpython/commit/e63cc2f64668bd1d4581f8efa7089af7e08863b8 commit: e63cc2f64668bd1d4581f8efa7089af7e08863b8 branch: 3.8 author: Ned Deily committer: GitHub date: 2020-06-15T17:42:22-04:00 summary: bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) (GH-20900) Co-authored-by: Xavier Fernandez files: A Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl A Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl A Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst D Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl D Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl M Lib/ensurepip/__init__.py M Lib/test/test_venv.py diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index ecfaee5640ea9..f3152a55d4430 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -9,13 +9,13 @@ __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "41.2.0" +_SETUPTOOLS_VERSION = "47.1.0" -_PIP_VERSION = "19.2.3" +_PIP_VERSION = "20.1.1" _PROJECTS = [ - ("setuptools", _SETUPTOOLS_VERSION), - ("pip", _PIP_VERSION), + ("setuptools", _SETUPTOOLS_VERSION, "py3"), + ("pip", _PIP_VERSION, "py2.py3"), ] @@ -104,8 +104,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False, # Put our bundled wheels into a temporary directory and construct the # additional paths that need added to sys.path additional_paths = [] - for project, version in _PROJECTS: - wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) + for project, version, py_tag in _PROJECTS: + wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag) whl = pkgutil.get_data( "ensurepip", "_bundled/{}".format(wheel_name), diff --git a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl deleted file mode 100644 index 8118df8ac1940..0000000000000 Binary files a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl new file mode 100644 index 0000000000000..ea1d0f7c8604a Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl differ diff --git a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl similarity index 65% rename from Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl index 82df6f63f4ee9..f87867ff98254 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl differ diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index bc4e95f2b1ecb..7e05138a80dc8 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -480,7 +480,7 @@ def do_test_with_pip(self, 
system_site_packages): # executing pip with sudo, you may want sudo's -H flag." # where $HOME is replaced by the HOME environment variable. err = re.sub("^(WARNING: )?The directory .* or its parent directory " - "is not owned by the current user .*$", "", + "is not owned or is not writable by the current user.*$", "", err, flags=re.MULTILINE) self.assertEqual(err.rstrip(), "") # Being fairly specific regarding the expected behaviour for the diff --git a/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst new file mode 100644 index 0000000000000..c44da9fecb605 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst @@ -0,0 +1 @@ +Update ensurepip to install pip 20.1.1 and setuptools 47.1.0. From webhook-mailer at python.org Mon Jun 15 18:01:28 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 15 Jun 2020 22:01:28 -0000 Subject: [Python-checkins] bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) (GH-20900) Message-ID: https://github.com/python/cpython/commit/678ffc42813c9fcf36324ba378632b8a3fc18fb6 commit: 678ffc42813c9fcf36324ba378632b8a3fc18fb6 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-15T15:01:19-07:00 summary: bpo-38488: Upgrade bundled versions of pip & setuptools (GH-20491) (GH-20900) Co-authored-by: Xavier Fernandez (cherry picked from commit e63cc2f64668bd1d4581f8efa7089af7e08863b8) Co-authored-by: Ned Deily files: A Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl A Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl A Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst D Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl D Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl M Lib/ensurepip/__init__.py M Lib/test/test_venv.py diff --git a/Lib/ensurepip/__init__.py b/Lib/ensurepip/__init__.py index 7c5baf0b5f1a7..94d40b0c8d010 100644 --- a/Lib/ensurepip/__init__.py +++ b/Lib/ensurepip/__init__.py @@ -9,13 +9,13 @@ __all__ = ["version", "bootstrap"] -_SETUPTOOLS_VERSION = "41.2.0" +_SETUPTOOLS_VERSION = "47.1.0" -_PIP_VERSION = "19.2.3" +_PIP_VERSION = "20.1.1" _PROJECTS = [ - ("setuptools", _SETUPTOOLS_VERSION), - ("pip", _PIP_VERSION), + ("setuptools", _SETUPTOOLS_VERSION, "py3"), + ("pip", _PIP_VERSION, "py2.py3"), ] @@ -102,8 +102,8 @@ def _bootstrap(*, root=None, upgrade=False, user=False, # Put our bundled wheels into a temporary directory and construct the # additional paths that need added to sys.path additional_paths = [] - for project, version in _PROJECTS: - wheel_name = "{}-{}-py2.py3-none-any.whl".format(project, version) + for project, version, py_tag in _PROJECTS: + wheel_name = "{}-{}-{}-none-any.whl".format(project, version, py_tag) whl = pkgutil.get_data( "ensurepip", "_bundled/{}".format(wheel_name), diff --git a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl deleted file mode 100644 index 8118df8ac1940..0000000000000 Binary files a/Lib/ensurepip/_bundled/pip-19.2.3-py2.py3-none-any.whl and /dev/null differ diff --git a/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl new file mode 100644 index 0000000000000..ea1d0f7c8604a Binary files /dev/null and b/Lib/ensurepip/_bundled/pip-20.1.1-py2.py3-none-any.whl differ diff --git 
a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl similarity index 65% rename from Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl rename to Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl index 82df6f63f4ee9..f87867ff98254 100644 Binary files a/Lib/ensurepip/_bundled/setuptools-41.2.0-py2.py3-none-any.whl and b/Lib/ensurepip/_bundled/setuptools-47.1.0-py3-none-any.whl differ diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 5f00c7ba97a36..a1fc6759d8d00 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -471,7 +471,7 @@ def do_test_with_pip(self, system_site_packages): # executing pip with sudo, you may want sudo's -H flag." # where $HOME is replaced by the HOME environment variable. err = re.sub("^(WARNING: )?The directory .* or its parent directory " - "is not owned by the current user .*$", "", + "is not owned or is not writable by the current user.*$", "", err, flags=re.MULTILINE) self.assertEqual(err.rstrip(), "") # Being fairly specific regarding the expected behaviour for the diff --git a/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst new file mode 100644 index 0000000000000..c44da9fecb605 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst @@ -0,0 +1 @@ +Update ensurepip to install pip 20.1.1 and setuptools 47.1.0. From webhook-mailer at python.org Mon Jun 15 18:54:49 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Jun 2020 22:54:49 -0000 Subject: [Python-checkins] bpo-36020: Require vsnprintf() to build Python (GH-20899) Message-ID: https://github.com/python/cpython/commit/7ab92d54b5d4440d84f6c02b4bc5a70103eff915 commit: 7ab92d54b5d4440d84f6c02b4bc5a70103eff915 branch: master author: Victor Stinner committer: GitHub date: 2020-06-16T00:54:44+02:00 summary: bpo-36020: Require vsnprintf() to build Python (GH-20899) The C99 functions snprintf() and vsnprintf() are now required to build Python. PyOS_snprintf() and PyOS_vsnprintf() no longer call Py_FatalError(). Previously, they called Py_FatalError() on a buffer overflow on platforms which don't provide vsnprintf(). files: A Misc/NEWS.d/next/Build/2020-06-15-22-14-25.bpo-36020.wbiv0P.rst M Doc/c-api/conversion.rst M Doc/whatsnew/3.10.rst M Python/mysnprintf.c diff --git a/Doc/c-api/conversion.rst b/Doc/c-api/conversion.rst index b310fcb5e4f91..efbaa52e2dc46 100644 --- a/Doc/c-api/conversion.rst +++ b/Doc/c-api/conversion.rst @@ -27,12 +27,8 @@ not. The wrappers ensure that *str*[*size*-1] is always ``'\0'`` upon return. They never write more than *size* bytes (including the trailing ``'\0'``) into str. -Both functions require that ``str != NULL``, ``size > 0`` and ``format != -NULL``. - -If the platform doesn't have :c:func:`vsnprintf` and the buffer size needed to -avoid truncation exceeds *size* by more than 512 bytes, Python aborts with a -:c:func:`Py_FatalError`. +Both functions require that ``str != NULL``, ``size > 0``, ``format != NULL`` +and ``size < INT_MAX``. The return value (*rv*) for these functions should be interpreted as follows: @@ -48,8 +44,8 @@ The return value (*rv*) for these functions should be interpreted as follows: this case too, but the rest of *str* is undefined. The exact cause of the error depends on the underlying platform. -The following functions provide locale-independent string to number conversions. 
+The following functions provide locale-independent string to number conversions. .. c:function:: double PyOS_string_to_double(const char *s, char **endptr, PyObject *overflow_exception) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 629909b79e2aa..9878f7f81ceda 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -123,6 +123,10 @@ that may require changes to your code. Build Changes ============= +* The C99 functions :c:func:`snprintf` and :c:func:`vsnprintf` are now required + to build Python. + (Contributed by Victor Stinner in :issue:`36020`.) + C API Changes ============= diff --git a/Misc/NEWS.d/next/Build/2020-06-15-22-14-25.bpo-36020.wbiv0P.rst b/Misc/NEWS.d/next/Build/2020-06-15-22-14-25.bpo-36020.wbiv0P.rst new file mode 100644 index 0000000000000..de50dff3b1d27 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-06-15-22-14-25.bpo-36020.wbiv0P.rst @@ -0,0 +1,2 @@ +The C99 functions :c:func:`snprintf` and :c:func:`vsnprintf` are now required +to build Python. diff --git a/Python/mysnprintf.c b/Python/mysnprintf.c index 458ca14d5c611..cd69198011e3c 100644 --- a/Python/mysnprintf.c +++ b/Python/mysnprintf.c @@ -15,10 +15,6 @@ PyOS_snprintf and PyOS_vsnprintf never write more than size bytes (including the trailing '\0') into str. - If the platform doesn't have vsnprintf, and the buffer size needed to - avoid truncation exceeds size by more than 512, Python aborts with a - Py_FatalError. - Return value (rv): When 0 <= rv < size, the output conversion was unexceptional, and @@ -37,6 +33,7 @@ PyMem_Malloc couldn't obtain space for a temp buffer. CAUTION: Unlike C99, str != NULL and size > 0 are required. + Also, size must be smaller than INT_MAX. */ int @@ -56,50 +53,22 @@ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) { assert(str != NULL); assert(size > 0); + assert(size <= (INT_MAX - 1)); assert(format != NULL); int len; /* # bytes written, excluding \0 */ -#if defined(_MSC_VER) || defined(HAVE_SNPRINTF) -# define _PyOS_vsnprintf_EXTRA_SPACE 1 -#else -# define _PyOS_vsnprintf_EXTRA_SPACE 512 - char *buffer; -#endif /* We take a size_t as input but return an int. Sanity check * our input so that it won't cause an overflow in the - * vsnprintf return value or the buffer malloc size. */ - if (size > INT_MAX - _PyOS_vsnprintf_EXTRA_SPACE) { + * vsnprintf return value. */ + if (size > INT_MAX - 1) { len = -666; goto Done; } #if defined(_MSC_VER) len = _vsnprintf(str, size, format, va); -#elif defined(HAVE_SNPRINTF) - len = vsnprintf(str, size, format, va); #else - /* Emulate vsnprintf(). */ - buffer = PyMem_MALLOC(size + _PyOS_vsnprintf_EXTRA_SPACE); - if (buffer == NULL) { - len = -666; - goto Done; - } - - len = vsprintf(buffer, format, va); - if (len < 0) { - /* ignore the error */; - } - else if ((size_t)len >= size + _PyOS_vsnprintf_EXTRA_SPACE) { - _Py_FatalErrorFunc(__func__, "Buffer overflow"); - } - else { - const size_t to_copy = (size_t)len < size ? 
- (size_t)len : size - 1; - assert(to_copy < size); - memcpy(str, buffer, to_copy); - str[to_copy] = '\0'; - } - PyMem_FREE(buffer); + len = vsnprintf(str, size, format, va); #endif Done: @@ -107,5 +76,4 @@ PyOS_vsnprintf(char *str, size_t size, const char *format, va_list va) str[size-1] = '\0'; } return len; -#undef _PyOS_vsnprintf_EXTRA_SPACE } From webhook-mailer at python.org Mon Jun 15 19:28:15 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 15 Jun 2020 23:28:15 -0000 Subject: [Python-checkins] bpo-40989: PyObject_INIT() becomes an alias to PyObject_Init() (GH-20901) Message-ID: https://github.com/python/cpython/commit/04fc4f2a46b2fd083639deb872c3a3037fdb47d6 commit: 04fc4f2a46b2fd083639deb872c3a3037fdb47d6 branch: master author: Victor Stinner committer: GitHub date: 2020-06-16T01:28:07+02:00 summary: bpo-40989: PyObject_INIT() becomes an alias to PyObject_Init() (GH-20901) The PyObject_INIT() and PyObject_INIT_VAR() macros become aliases to, respectively, PyObject_Init() and PyObject_InitVar() functions. Rename _PyObject_INIT() and _PyObject_INIT_VAR() static inline functions to, respectively, _PyObject_Init() and _PyObject_InitVar(), and move them to pycore_object.h. Remove their return value: their return type becomes void. The _datetime module is now built with the Py_BUILD_CORE_MODULE macro defined. Remove an outdated comment on _Py_tracemalloc_config. files: A Misc/NEWS.d/next/C API/2020-06-15-23-17-51.bpo-40989.tlzG3r.rst M Include/cpython/objimpl.h M Include/internal/pycore_object.h M Include/internal/pycore_pymem.h M Include/objimpl.h M Modules/_datetimemodule.c M Modules/gcmodule.c M Objects/bytesobject.c M Objects/complexobject.c M Objects/floatobject.c M Objects/longobject.c M Objects/object.c M Objects/tupleobject.c M Objects/typeobject.c M Objects/unicodeobject.c M PC/winreg.c M setup.py diff --git a/Include/cpython/objimpl.h b/Include/cpython/objimpl.h index ca4009bcdb4c1..15999a239f7a9 100644 --- a/Include/cpython/objimpl.h +++ b/Include/cpython/objimpl.h @@ -37,8 +37,9 @@ PyObject *op; op = (PyObject *) Your_Allocator(_PyObject_SIZE(YourTypeStruct)); - if (op == NULL) - return PyErr_NoMemory(); + if (op == NULL) { + return PyErr_NoMemory(); + } PyObject_Init(op, &YourTypeStruct); @@ -51,40 +52,6 @@ the 1st step is performed automatically for you, so in a C++ class constructor you would start directly with PyObject_Init/InitVar. */ - -/* Inline functions trading binary compatibility for speed: - PyObject_INIT() is the fast version of PyObject_Init(), and - PyObject_INIT_VAR() is the fast version of PyObject_InitVar(). - - These inline functions must not be called with op=NULL. 
*/ -static inline PyObject* -_PyObject_INIT(PyObject *op, PyTypeObject *typeobj) -{ - assert(op != NULL); - Py_SET_TYPE(op, typeobj); - if (PyType_GetFlags(typeobj) & Py_TPFLAGS_HEAPTYPE) { - Py_INCREF(typeobj); - } - _Py_NewReference(op); - return op; -} - -#define PyObject_INIT(op, typeobj) \ - _PyObject_INIT(_PyObject_CAST(op), (typeobj)) - -static inline PyVarObject* -_PyObject_INIT_VAR(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size) -{ - assert(op != NULL); - Py_SET_SIZE(op, size); - PyObject_INIT((PyObject *)op, typeobj); - return op; -} - -#define PyObject_INIT_VAR(op, typeobj, size) \ - _PyObject_INIT_VAR(_PyVarObject_CAST(op), (typeobj), (size)) - - /* This function returns the number of allocated memory blocks, regardless of size */ PyAPI_FUNC(Py_ssize_t) _Py_GetAllocatedBlocks(void); diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 32e86d06db5b4..14444a70ceb01 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -15,6 +15,37 @@ extern "C" { PyAPI_FUNC(int) _PyType_CheckConsistency(PyTypeObject *type); PyAPI_FUNC(int) _PyDict_CheckConsistency(PyObject *mp, int check_content); +// Fast inlined version of PyType_HasFeature() +static inline int +_PyType_HasFeature(PyTypeObject *type, unsigned long feature) { + return ((type->tp_flags & feature) != 0); +} + +/* Inline functions trading binary compatibility for speed: + _PyObject_Init() is the fast version of PyObject_Init(), and + _PyObject_InitVar() is the fast version of PyObject_InitVar(). + + These inline functions must not be called with op=NULL. */ +static inline void +_PyObject_Init(PyObject *op, PyTypeObject *typeobj) +{ + assert(op != NULL); + Py_SET_TYPE(op, typeobj); + if (_PyType_HasFeature(typeobj, Py_TPFLAGS_HEAPTYPE)) { + Py_INCREF(typeobj); + } + _Py_NewReference(op); +} + +static inline void +_PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size) +{ + assert(op != NULL); + Py_SET_SIZE(op, size); + _PyObject_Init((PyObject *)op, typeobj); +} + + /* Tell the GC to track this object. * * NB: While the object is tracked by the collector, it must be safe to call the @@ -96,12 +127,6 @@ _PyObject_GET_WEAKREFS_LISTPTR(PyObject *op) return (PyObject **)((char *)op + offset); } -// Fast inlined version of PyType_HasFeature() -static inline int -_PyType_HasFeature(PyTypeObject *type, unsigned long feature) { - return ((type->tp_flags & feature) != 0); -} - // Fast inlined version of PyObject_IS_GC() static inline int _PyObject_IS_GC(PyObject *obj) diff --git a/Include/internal/pycore_pymem.h b/Include/internal/pycore_pymem.h index 3d925e2250d25..e4e35c16ce8ed 100644 --- a/Include/internal/pycore_pymem.h +++ b/Include/internal/pycore_pymem.h @@ -69,9 +69,6 @@ PyAPI_FUNC(int) _PyMem_GetAllocatorName( PYMEM_ALLOCATOR_NOT_SET does nothing. */ PyAPI_FUNC(int) _PyMem_SetupAllocators(PyMemAllocatorName allocator); -/* bpo-35053: Expose _Py_tracemalloc_config for _Py_NewReference() - which access directly _Py_tracemalloc_config.tracing for best - performances. */ struct _PyTraceMalloc_Config { /* Module initialized? 
Variable protected by the GIL */ diff --git a/Include/objimpl.h b/Include/objimpl.h index 030d7eee29723..af537175bfed8 100644 --- a/Include/objimpl.h +++ b/Include/objimpl.h @@ -118,7 +118,14 @@ PyAPI_FUNC(void) PyObject_Free(void *ptr); /* Functions */ PyAPI_FUNC(PyObject *) PyObject_Init(PyObject *, PyTypeObject *); PyAPI_FUNC(PyVarObject *) PyObject_InitVar(PyVarObject *, - PyTypeObject *, Py_ssize_t); + PyTypeObject *, Py_ssize_t); + +#define PyObject_INIT(op, typeobj) \ + PyObject_Init(_PyObject_CAST(op), (typeobj)) +#define PyObject_INIT_VAR(op, typeobj, size) \ + PyObject_InitVar(_PyVarObject_CAST(op), (typeobj), (size)) + + PyAPI_FUNC(PyObject *) _PyObject_New(PyTypeObject *); PyAPI_FUNC(PyVarObject *) _PyObject_NewVar(PyTypeObject *, Py_ssize_t); @@ -136,19 +143,6 @@ PyAPI_FUNC(PyVarObject *) _PyObject_NewVar(PyTypeObject *, Py_ssize_t); #define PyObject_NEW_VAR(type, typeobj, n) PyObject_NewVar(type, typeobj, n) -#ifdef Py_LIMITED_API -/* Define PyObject_INIT() and PyObject_INIT_VAR() as aliases to PyObject_Init() - and PyObject_InitVar() in the limited C API for compatibility with the - CPython C API. */ -# define PyObject_INIT(op, typeobj) \ - PyObject_Init(_PyObject_CAST(op), (typeobj)) -# define PyObject_INIT_VAR(op, typeobj, size) \ - PyObject_InitVar(_PyVarObject_CAST(op), (typeobj), (size)) -#else -/* PyObject_INIT() and PyObject_INIT_VAR() are defined in cpython/objimpl.h */ -#endif - - /* * Garbage Collection Support * ========================== diff --git a/Misc/NEWS.d/next/C API/2020-06-15-23-17-51.bpo-40989.tlzG3r.rst b/Misc/NEWS.d/next/C API/2020-06-15-23-17-51.bpo-40989.tlzG3r.rst new file mode 100644 index 0000000000000..1be473d142760 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-15-23-17-51.bpo-40989.tlzG3r.rst @@ -0,0 +1,3 @@ +The :c:func:`PyObject_INIT` and :c:func:`PyObject_INIT_VAR` macros become +aliases to, respectively, :c:func:`PyObject_Init` and +:c:func:`PyObject_InitVar` functions. diff --git a/Modules/_datetimemodule.c b/Modules/_datetimemodule.c index acdde83dc845a..74a54e74ae0fe 100644 --- a/Modules/_datetimemodule.c +++ b/Modules/_datetimemodule.c @@ -8,6 +8,7 @@ #define _PY_DATETIME_IMPL #include "Python.h" +#include "pycore_object.h" // _PyObject_Init() #include "datetime.h" #include "structmember.h" // PyMemberDef @@ -638,30 +639,24 @@ normalize_datetime(int *year, int *month, int *day, static PyObject * time_alloc(PyTypeObject *type, Py_ssize_t aware) { - PyObject *self; - - self = (PyObject *) - PyObject_MALLOC(aware ? - sizeof(PyDateTime_Time) : - sizeof(_PyDateTime_BaseTime)); - if (self == NULL) - return (PyObject *)PyErr_NoMemory(); - (void)PyObject_INIT(self, type); + size_t size = aware ? sizeof(PyDateTime_Time) : sizeof(_PyDateTime_BaseTime); + PyObject *self = (PyObject *)PyObject_Malloc(size); + if (self == NULL) { + return PyErr_NoMemory(); + } + _PyObject_Init(self, type); return self; } static PyObject * datetime_alloc(PyTypeObject *type, Py_ssize_t aware) { - PyObject *self; - - self = (PyObject *) - PyObject_MALLOC(aware ? - sizeof(PyDateTime_DateTime) : - sizeof(_PyDateTime_BaseDateTime)); - if (self == NULL) - return (PyObject *)PyErr_NoMemory(); - (void)PyObject_INIT(self, type); + size_t size = aware ? 
sizeof(PyDateTime_DateTime) : sizeof(_PyDateTime_BaseDateTime); + PyObject *self = (PyObject *)PyObject_Malloc(size); + if (self == NULL) { + return PyErr_NoMemory(); + } + _PyObject_Init(self, type); return self; } diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 444db7b03b4a5..110a48d8cd76f 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -2263,8 +2263,10 @@ PyObject * _PyObject_GC_New(PyTypeObject *tp) { PyObject *op = _PyObject_GC_Malloc(_PyObject_SIZE(tp)); - if (op != NULL) - op = PyObject_INIT(op, tp); + if (op == NULL) { + return NULL; + } + _PyObject_Init(op, tp); return op; } @@ -2280,8 +2282,10 @@ _PyObject_GC_NewVar(PyTypeObject *tp, Py_ssize_t nitems) } size = _PyObject_VAR_SIZE(tp, nitems); op = (PyVarObject *) _PyObject_GC_Malloc(size); - if (op != NULL) - op = PyObject_INIT_VAR(op, tp, nitems); + if (op == NULL) { + return NULL; + } + _PyObject_InitVar(op, tp, nitems); return op; } diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index b79c2460409eb..d39721428634f 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -79,9 +79,10 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) op = (PyBytesObject *)PyObject_Calloc(1, PyBytesObject_SIZE + size); else op = (PyBytesObject *)PyObject_Malloc(PyBytesObject_SIZE + size); - if (op == NULL) + if (op == NULL) { return PyErr_NoMemory(); - (void)PyObject_INIT_VAR(op, &PyBytes_Type, size); + } + _PyObject_InitVar((PyVarObject*)op, &PyBytes_Type, size); op->ob_shash = -1; if (!use_calloc) op->ob_sval[size] = '\0'; @@ -148,9 +149,10 @@ PyBytes_FromString(const char *str) /* Inline PyObject_NewVar */ op = (PyBytesObject *)PyObject_MALLOC(PyBytesObject_SIZE + size); - if (op == NULL) + if (op == NULL) { return PyErr_NoMemory(); - (void)PyObject_INIT_VAR(op, &PyBytes_Type, size); + } + _PyObject_InitVar((PyVarObject*)op, &PyBytes_Type, size); op->ob_shash = -1; memcpy(op->ob_sval, str, size+1); /* share short strings */ @@ -1435,9 +1437,10 @@ bytes_repeat(PyBytesObject *a, Py_ssize_t n) return NULL; } op = (PyBytesObject *)PyObject_MALLOC(PyBytesObject_SIZE + nbytes); - if (op == NULL) + if (op == NULL) { return PyErr_NoMemory(); - (void)PyObject_INIT_VAR(op, &PyBytes_Type, size); + } + _PyObject_InitVar((PyVarObject*)op, &PyBytes_Type, size); op->ob_shash = -1; op->ob_sval[size] = '\0'; if (Py_SIZE(a) == 1 && n > 0) { diff --git a/Objects/complexobject.c b/Objects/complexobject.c index a49037783be77..d983a30901d9e 100644 --- a/Objects/complexobject.c +++ b/Objects/complexobject.c @@ -6,8 +6,10 @@ /* Submitted by Jim Hugunin */ #include "Python.h" +#include "pycore_object.h" // _PyObject_Init() #include "structmember.h" // PyMemberDef + /*[clinic input] class complex "PyComplexObject *" "&PyComplex_Type" [clinic start generated code]*/ @@ -229,13 +231,12 @@ complex_subtype_from_c_complex(PyTypeObject *type, Py_complex cval) PyObject * PyComplex_FromCComplex(Py_complex cval) { - PyComplexObject *op; - /* Inline PyObject_New */ - op = (PyComplexObject *) PyObject_MALLOC(sizeof(PyComplexObject)); - if (op == NULL) + PyComplexObject *op = PyObject_MALLOC(sizeof(PyComplexObject)); + if (op == NULL) { return PyErr_NoMemory(); - (void)PyObject_INIT(op, &PyComplex_Type); + } + _PyObject_Init((PyObject*)op, &PyComplex_Type); op->cval = cval; return (PyObject *) op; } diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 65625fe88cad8..7ffd7eebe5a45 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -4,8 +4,9 @@ for any kind of float exception without losing portability. 
*/ #include "Python.h" -#include "pycore_dtoa.h" +#include "pycore_dtoa.h" // _Py_dg_dtoa() #include "pycore_interp.h" // _PyInterpreterState.float_state +#include "pycore_object.h" // _PyObject_Init() #include "pycore_pystate.h" // _PyInterpreterState_GET() #include @@ -129,7 +130,7 @@ PyFloat_FromDouble(double fval) return PyErr_NoMemory(); } } - (void)PyObject_INIT(op, &PyFloat_Type); + _PyObject_Init((PyObject*)op, &PyFloat_Type); op->ob_fval = fval; return (PyObject *) op; } diff --git a/Objects/longobject.c b/Objects/longobject.c index d92a9c56a7208..d00a7a048ddce 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -5,6 +5,7 @@ #include "Python.h" #include "pycore_bitutils.h" // _Py_popcount32() #include "pycore_interp.h" // _PY_NSMALLPOSINTS +#include "pycore_object.h" // _PyObject_InitVar() #include "pycore_pystate.h" // _Py_IsMainInterpreter() #include "longintrepr.h" @@ -146,7 +147,8 @@ _PyLong_New(Py_ssize_t size) PyErr_NoMemory(); return NULL; } - return (PyLongObject*)PyObject_INIT_VAR(result, &PyLong_Type, size); + _PyObject_InitVar((PyVarObject*)result, &PyLong_Type, size); + return result; } PyObject * diff --git a/Objects/object.c b/Objects/object.c index 0ab5de28499a8..4481fc91e1db1 100644 --- a/Objects/object.c +++ b/Objects/object.c @@ -139,23 +139,23 @@ Py_DecRef(PyObject *o) PyObject * PyObject_Init(PyObject *op, PyTypeObject *tp) { - /* Any changes should be reflected in PyObject_INIT() macro */ if (op == NULL) { return PyErr_NoMemory(); } - return PyObject_INIT(op, tp); + _PyObject_Init(op, tp); + return op; } PyVarObject * PyObject_InitVar(PyVarObject *op, PyTypeObject *tp, Py_ssize_t size) { - /* Any changes should be reflected in PyObject_INIT_VAR() macro */ if (op == NULL) { return (PyVarObject *) PyErr_NoMemory(); } - return PyObject_INIT_VAR(op, tp, size); + _PyObject_InitVar(op, tp, size); + return op; } PyObject * @@ -165,7 +165,7 @@ _PyObject_New(PyTypeObject *tp) if (op == NULL) { return PyErr_NoMemory(); } - PyObject_INIT(op, tp); + _PyObject_Init(op, tp); return op; } @@ -175,9 +175,11 @@ _PyObject_NewVar(PyTypeObject *tp, Py_ssize_t nitems) PyVarObject *op; const size_t size = _PyObject_VAR_SIZE(tp, nitems); op = (PyVarObject *) PyObject_MALLOC(size); - if (op == NULL) + if (op == NULL) { return (PyVarObject *)PyErr_NoMemory(); - return PyObject_INIT_VAR(op, tp, nitems); + } + _PyObject_InitVar(op, tp, nitems); + return op; } void diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 8bfa0894a79d4..2ff4c48111fe0 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -62,7 +62,7 @@ tuple_alloc(struct _Py_tuple_state *state, Py_ssize_t size) assert(size != 0); state->free_list[size] = (PyTupleObject *) op->ob_item[0]; state->numfree[size]--; - /* Inline PyObject_InitVar */ + /* Inlined _PyObject_InitVar() without _PyType_HasFeature() test */ #ifdef Py_TRACE_REFS Py_SET_SIZE(op, size); Py_SET_TYPE(op, &PyTuple_Type); diff --git a/Objects/typeobject.c b/Objects/typeobject.c index c8f0d2ee45f3c..f0e349ecd2bb9 100644 --- a/Objects/typeobject.c +++ b/Objects/typeobject.c @@ -1060,10 +1060,10 @@ PyType_GenericAlloc(PyTypeObject *type, Py_ssize_t nitems) memset(obj, '\0', size); if (type->tp_itemsize == 0) { - (void)PyObject_INIT(obj, type); + _PyObject_Init(obj, type); } else { - (void) PyObject_INIT_VAR((PyVarObject *)obj, type, nitems); + _PyObject_InitVar((PyVarObject *)obj, type, nitems); } if (_PyType_IS_GC(type)) { diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 7ab0c882db049..c75eb077e0c80 100644 --- 
a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -1435,11 +1435,10 @@ PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) * it's data buffer. */ obj = (PyObject *) PyObject_MALLOC(struct_size + (size + 1) * char_size); - if (obj == NULL) + if (obj == NULL) { return PyErr_NoMemory(); - obj = PyObject_INIT(obj, &PyUnicode_Type); - if (obj == NULL) - return NULL; + } + _PyObject_Init(obj, &PyUnicode_Type); unicode = (PyCompactUnicodeObject *)obj; if (is_ascii) @@ -8392,9 +8391,11 @@ PyUnicode_BuildEncodingMap(PyObject* string) /* Create a three-level trie */ result = PyObject_MALLOC(sizeof(struct encoding_map) + 16*count2 + 128*count3 - 1); - if (!result) + if (!result) { return PyErr_NoMemory(); - PyObject_Init(result, &EncodingMapType); + } + + _PyObject_Init(result, &EncodingMapType); mresult = (struct encoding_map*)result; mresult->count2 = count2; mresult->count3 = count3; diff --git a/PC/winreg.c b/PC/winreg.c index 1305b7030fada..7c3b2f4be85c9 100644 --- a/PC/winreg.c +++ b/PC/winreg.c @@ -14,6 +14,7 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" +#include "pycore_object.h" // _PyObject_Init() #include "structmember.h" // PyMemberDef #include @@ -457,13 +458,12 @@ clinic_HKEY_converter(PyObject *ob, void *p) PyObject * PyHKEY_FromHKEY(HKEY h) { - PyHKEYObject *op; - /* Inline PyObject_New */ - op = (PyHKEYObject *) PyObject_MALLOC(sizeof(PyHKEYObject)); - if (op == NULL) + PyHKEYObject *op = (PyHKEYObject *) PyObject_MALLOC(sizeof(PyHKEYObject)); + if (op == NULL) { return PyErr_NoMemory(); - PyObject_INIT(op, &PyHKEY_Type); + } + _PyObject_Init(op, &PyHKEY_Type); op->hkey = h; return (PyObject *)op; } diff --git a/setup.py b/setup.py index ef2faf613beec..b220f5279ca63 100644 --- a/setup.py +++ b/setup.py @@ -853,7 +853,8 @@ def detect_simple_extensions(self): # libm is needed by delta_new() that uses round() and by accum() that # uses modf(). 
self.add(Extension('_datetime', ['_datetimemodule.c'], - libraries=['m'])) + libraries=['m'], + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # zoneinfo module self.add(Extension('_zoneinfo', ['_zoneinfo.c'])), # random number generator implemented in C From webhook-mailer at python.org Mon Jun 15 19:54:02 2020 From: webhook-mailer at python.org (David Szotten) Date: Mon, 15 Jun 2020 23:54:02 -0000 Subject: [Python-checkins] closes bpo-28557: error message for bad raw readinto (GH-7496) Message-ID: https://github.com/python/cpython/commit/8666356280084f0426c28a981341f72eaaacd006 commit: 8666356280084f0426c28a981341f72eaaacd006 branch: master author: David Szotten committer: GitHub date: 2020-06-15T18:53:57-05:00 summary: closes bpo-28557: error message for bad raw readinto (GH-7496) Co-authored-by: Benjamin Peterson files: A Misc/NEWS.d/next/Library/2018-06-07-22-04-01.bpo-28557.ViNJnK.rst M Lib/test/test_io.py M Misc/ACKS M Modules/_io/bufferedio.c diff --git a/Lib/test/test_io.py b/Lib/test/test_io.py index 7b8511b66bf10..c0d67a17d8c6f 100644 --- a/Lib/test/test_io.py +++ b/Lib/test/test_io.py @@ -1587,6 +1587,22 @@ def test_args_error(self): with self.assertRaisesRegex(TypeError, "BufferedReader"): self.tp(io.BytesIO(), 1024, 1024, 1024) + def test_bad_readinto_value(self): + rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio.readinto = lambda buf: -1 + bufio = self.tp(rawio) + with self.assertRaises(OSError) as cm: + bufio.readline() + self.assertIsNone(cm.exception.__cause__) + + def test_bad_readinto_type(self): + rawio = io.BufferedReader(io.BytesIO(b"12")) + rawio.readinto = lambda buf: b'' + bufio = self.tp(rawio) + with self.assertRaises(OSError) as cm: + bufio.readline() + self.assertIsInstance(cm.exception.__cause__, TypeError) + class PyBufferedReaderTest(BufferedReaderTest): tp = pyio.BufferedReader diff --git a/Misc/ACKS b/Misc/ACKS index 0fc1954a22237..87f0dede365c2 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1678,6 +1678,7 @@ P?ter Szab? 
John Szakmeister Piotr Szczepaniak Amir Szekely +David Szotten Maciej Szulik Joel Taddei Arfrever Frehtes Taifersar Arahesis diff --git a/Misc/NEWS.d/next/Library/2018-06-07-22-04-01.bpo-28557.ViNJnK.rst b/Misc/NEWS.d/next/Library/2018-06-07-22-04-01.bpo-28557.ViNJnK.rst new file mode 100644 index 0000000000000..4137e2ff89beb --- /dev/null +++ b/Misc/NEWS.d/next/Library/2018-06-07-22-04-01.bpo-28557.ViNJnK.rst @@ -0,0 +1 @@ +Improve the error message for a misbehaving ``rawio.readinto`` diff --git a/Modules/_io/bufferedio.c b/Modules/_io/bufferedio.c index f8e21f206f316..5984d34cc0829 100644 --- a/Modules/_io/bufferedio.c +++ b/Modules/_io/bufferedio.c @@ -1483,6 +1483,15 @@ _bufferedreader_raw_read(buffered *self, char *start, Py_ssize_t len) } n = PyNumber_AsSsize_t(res, PyExc_ValueError); Py_DECREF(res); + + if (n == -1 && PyErr_Occurred()) { + _PyErr_FormatFromCause( + PyExc_OSError, + "raw readinto() failed" + ); + return -1; + } + if (n < 0 || n > len) { PyErr_Format(PyExc_OSError, "raw readinto() returned invalid length %zd " From webhook-mailer at python.org Mon Jun 15 20:27:42 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Tue, 16 Jun 2020 00:27:42 -0000 Subject: [Python-checkins] bpo-40985: Show correct SyntaxError text when last line has a LINECONT (GH-20888) Message-ID: https://github.com/python/cpython/commit/113e2b0a07c72c0d5e3489076afb14f6b3ad1049 commit: 113e2b0a07c72c0d5e3489076afb14f6b3ad1049 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-16T01:27:33+01:00 summary: bpo-40985: Show correct SyntaxError text when last line has a LINECONT (GH-20888) When a file ends with a line that contains a line continuation character the text of the emitted SyntaxError is empty, contrary to the old parser, where the error text contained the text of the last line. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-15-16-29-55.bpo-40985.IIN_xX.rst M Lib/test/test_eof.py M Python/errors.c diff --git a/Lib/test/test_eof.py b/Lib/test/test_eof.py index bebad3106119e..51cbbd8eed664 100644 --- a/Lib/test/test_eof.py +++ b/Lib/test/test_eof.py @@ -52,10 +52,14 @@ def test_line_continuation_EOF_from_file_bpo2180(self): file_name = script_helper.make_script(temp_dir, 'foo', '\\') rc, out, err = script_helper.assert_python_failure(file_name) self.assertIn(b'unexpected EOF while parsing', err) + self.assertIn(b'line 2', err) + self.assertIn(b'\\', err) file_name = script_helper.make_script(temp_dir, 'foo', 'y = 6\\') rc, out, err = script_helper.assert_python_failure(file_name) self.assertIn(b'unexpected EOF while parsing', err) + self.assertIn(b'line 2', err) + self.assertIn(b'y = 6\\', err) if __name__ == "__main__": unittest.main() diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-15-16-29-55.bpo-40985.IIN_xX.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-15-16-29-55.bpo-40985.IIN_xX.rst new file mode 100644 index 0000000000000..e07134c7166ad --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-15-16-29-55.bpo-40985.IIN_xX.rst @@ -0,0 +1 @@ +Fix a bug that caused the :exc:`SyntaxError` text to be empty when a file ends with a line ending in a line continuation character (i.e. backslash). The error text should contain the text of the last line. 
diff --git a/Python/errors.c b/Python/errors.c index cc00ae4a5407b..720f18bc224d4 100644 --- a/Python/errors.c +++ b/Python/errors.c @@ -1646,16 +1646,18 @@ err_programtext(PyThreadState *tstate, FILE *fp, int lineno) { int i; char linebuf[1000]; - - if (fp == NULL) + if (fp == NULL) { return NULL; + } + for (i = 0; i < lineno; i++) { char *pLastChar = &linebuf[sizeof(linebuf) - 2]; do { *pLastChar = '\0'; if (Py_UniversalNewlineFgets(linebuf, sizeof linebuf, - fp, NULL) == NULL) - break; + fp, NULL) == NULL) { + goto after_loop; + } /* fgets read *something*; if it didn't get as far as pLastChar, it must have found a newline or hit the end of the file; if pLastChar is \n, @@ -1663,6 +1665,8 @@ err_programtext(PyThreadState *tstate, FILE *fp, int lineno) yet seen a newline, so must continue */ } while (*pLastChar != '\0' && *pLastChar != '\n'); } + +after_loop: fclose(fp); if (i == lineno) { PyObject *res; From webhook-mailer at python.org Mon Jun 15 20:56:36 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Tue, 16 Jun 2020 00:56:36 -0000 Subject: [Python-checkins] bpo-19569: Add a macro to suppress deprecation warnings (GH-9004) Message-ID: https://github.com/python/cpython/commit/de4304dad8e035dbbb57d653e685312eead816df commit: de4304dad8e035dbbb57d653e685312eead816df branch: master author: Zackery Spytz committer: GitHub date: 2020-06-16T09:56:27+09:00 summary: bpo-19569: Add a macro to suppress deprecation warnings (GH-9004) Co-authored-by: Arfrever Frehtes Taifersar Arahesis files: A Misc/NEWS.d/next/Core and Builtins/2018-08-29-15-57-07.bpo-19569.RGu2Kb.rst M Include/pyport.h diff --git a/Include/pyport.h b/Include/pyport.h index 3c71f30bce16f..7137006870bf0 100644 --- a/Include/pyport.h +++ b/Include/pyport.h @@ -515,6 +515,26 @@ extern "C" { #define Py_DEPRECATED(VERSION_UNUSED) #endif +#if defined(__clang__) +#define _Py_COMP_DIAG_PUSH _Pragma("clang diagnostic push") +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS \ + _Pragma("clang diagnostic ignored \"-Wdeprecated-declarations\"") +#define _Py_COMP_DIAG_POP _Pragma("clang diagnostic pop") +#elif defined(__GNUC__) \ + && ((__GNUC__ >= 5) || (__GNUC__ == 4) && (__GNUC_MINOR__ >= 6)) +#define _Py_COMP_DIAG_PUSH _Pragma("GCC diagnostic push") +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS \ + _Pragma("GCC diagnostic ignored \"-Wdeprecated-declarations\"") +#define _Py_COMP_DIAG_POP _Pragma("GCC diagnostic pop") +#elif defined(_MSC_VER) +#define _Py_COMP_DIAG_PUSH __pragma(warning(push)) +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS __pragma(warning(disable: 4996)) +#define _Py_COMP_DIAG_POP __pragma(warning(pop)) +#else +#define _Py_COMP_DIAG_PUSH +#define _Py_COMP_DIAG_IGNORE_DEPR_DECLS +#define _Py_COMP_DIAG_POP +#endif /* _Py_HOT_FUNCTION * The hot attribute on a function is used to inform the compiler that the diff --git a/Misc/NEWS.d/next/Core and Builtins/2018-08-29-15-57-07.bpo-19569.RGu2Kb.rst b/Misc/NEWS.d/next/Core and Builtins/2018-08-29-15-57-07.bpo-19569.RGu2Kb.rst new file mode 100644 index 0000000000000..1b76bd8e247fc --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2018-08-29-15-57-07.bpo-19569.RGu2Kb.rst @@ -0,0 +1,2 @@ +Add the private macros ``_Py_COMP_DIAG_PUSH``, +``_Py_COMP_DIAG_IGNORE_DEPR_DECLS``, and ``_Py_COMP_DIAG_POP``. 
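For context, a minimal sketch of how the three new pyport.h macros are meant to be used: they bracket code that deliberately references a declaration marked Py_DEPRECATED(), silencing -Wdeprecated-declarations on GCC/Clang and warning C4996 on MSVC for that region only; on other compilers they expand to nothing, so the wrapped code still compiles unchanged. The names old_api() and call_old_api_quietly() are invented for illustration and do not appear in the patch.

    #include "Python.h"

    /* A deliberately deprecated declaration, for demonstration purposes only. */
    Py_DEPRECATED(3.10) static int old_api(void);
    static int old_api(void) { return 42; }

    int
    call_old_api_quietly(void)
    {
        int r;
        _Py_COMP_DIAG_PUSH
        _Py_COMP_DIAG_IGNORE_DEPR_DECLS
        r = old_api();               /* no deprecation warning is emitted here */
        _Py_COMP_DIAG_POP
        return r;
    }
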
From webhook-mailer at python.org Mon Jun 15 21:13:41 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 16 Jun 2020 01:13:41 -0000 Subject: [Python-checkins] Remove old comment in string_parser.c (GH-20906) Message-ID: https://github.com/python/cpython/commit/e0bec69854ff17b1a6db9bc01db8c002761bb7a8 commit: e0bec69854ff17b1a6db9bc01db8c002761bb7a8 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-16T02:13:33+01:00 summary: Remove old comment in string_parser.c (GH-20906) files: M Parser/string_parser.c diff --git a/Parser/string_parser.c b/Parser/string_parser.c index 7d50e43f4e342..f8e2427276cd3 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -6,11 +6,6 @@ //// STRING HANDLING FUNCTIONS //// -// These functions are ported directly from Python/ast.c with some modifications -// to account for the use of "Parser *p", the fact that don't have parser nodes -// to pass around and the usage of some specialized APIs present only in this -// file (like "_PyPegen_raise_syntax_error"). - static int warn_invalid_escape_sequence(Parser *p, unsigned char first_invalid_escape_char, Token *t) { From webhook-mailer at python.org Tue Jun 16 11:27:50 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 16 Jun 2020 15:27:50 -0000 Subject: [Python-checkins] bpo-40993: Don't run Travis CI coverage on PRs (GH-20916) Message-ID: https://github.com/python/cpython/commit/fc710ee266e9461fdba9933ec6004318db588820 commit: fc710ee266e9461fdba9933ec6004318db588820 branch: master author: Victor Stinner committer: GitHub date: 2020-06-16T17:27:30+02:00 summary: bpo-40993: Don't run Travis CI coverage on PRs (GH-20916) C and Python coverage jobs of Travis CI are no longer run on pull requests, only on branches like master. files: M .travis.yml diff --git a/.travis.yml b/.travis.yml index 5d57150e61c18..a915f7a46ec3d 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,6 +82,12 @@ matrix: packages: - xvfb before_script: + - | + if [[ "$TRAVIS_PULL_REQUEST" != "false" ]] + then + echo "Don't run Python coverage on pull requests." + exit + fi - ./configure - make -j4 # Need a venv that can parse covered code. @@ -109,6 +115,12 @@ matrix: - lcov - xvfb before_script: + - | + if [[ "$TRAVIS_PULL_REQUEST" != "false" ]] + then + echo "Don't run C coverage on pull requests." + exit + fi - ./configure script: - xvfb-run make -j4 coverage-report From webhook-mailer at python.org Tue Jun 16 11:29:58 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 16 Jun 2020 15:29:58 -0000 Subject: [Python-checkins] bpo-40989: Make _PyTraceMalloc_NewReference() internal (GH-20915) Message-ID: https://github.com/python/cpython/commit/fcc60e40bbfe8a229b8b83f1d1ee77fd4bf870d1 commit: fcc60e40bbfe8a229b8b83f1d1ee77fd4bf870d1 branch: master author: Victor Stinner committer: GitHub date: 2020-06-16T17:29:50+02:00 summary: bpo-40989: Make _PyTraceMalloc_NewReference() internal (GH-20915) Make the _PyTraceMalloc_NewReference() function fully internal: remove it from the public C API and don't export it anymore. files: M Include/cpython/object.h M Include/internal/pycore_object.h diff --git a/Include/cpython/object.h b/Include/cpython/object.h index 304cfbfc37dff..ae3920d4508e1 100644 --- a/Include/cpython/object.h +++ b/Include/cpython/object.h @@ -9,10 +9,6 @@ PyAPI_FUNC(void) _Py_NewReference(PyObject *op); PyAPI_FUNC(void) _Py_ForgetReference(PyObject *); #endif -/* Update the Python traceback of an object. 
This function must be called - when a memory block is reused from a free list. */ -PyAPI_FUNC(int) _PyTraceMalloc_NewReference(PyObject *op); - #ifdef Py_REF_DEBUG PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void); #endif diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 14444a70ceb01..9740717b8a397 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -15,6 +15,12 @@ extern "C" { PyAPI_FUNC(int) _PyType_CheckConsistency(PyTypeObject *type); PyAPI_FUNC(int) _PyDict_CheckConsistency(PyObject *mp, int check_content); +/* Update the Python traceback of an object. This function must be called + when a memory block is reused from a free list. + + Internal function called by _Py_NewReference(). */ +extern int _PyTraceMalloc_NewReference(PyObject *op); + // Fast inlined version of PyType_HasFeature() static inline int _PyType_HasFeature(PyTypeObject *type, unsigned long feature) { From webhook-mailer at python.org Tue Jun 16 11:47:24 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 16 Jun 2020 15:47:24 -0000 Subject: [Python-checkins] bpo-40993: Don't run Travis CI coverage on PRs (GH-20916) Message-ID: https://github.com/python/cpython/commit/071bed842eeff9673bc5c4f64e3916a151132d2a commit: 071bed842eeff9673bc5c4f64e3916a151132d2a branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-16T08:47:16-07:00 summary: bpo-40993: Don't run Travis CI coverage on PRs (GH-20916) C and Python coverage jobs of Travis CI are no longer run on pull requests, only on branches like master. (cherry picked from commit fc710ee266e9461fdba9933ec6004318db588820) Co-authored-by: Victor Stinner files: M .travis.yml diff --git a/.travis.yml b/.travis.yml index 1d3eb737abffa..2b6fed6175265 100644 --- a/.travis.yml +++ b/.travis.yml @@ -82,6 +82,12 @@ matrix: packages: - xvfb before_script: + - | + if [[ "$TRAVIS_PULL_REQUEST" != "false" ]] + then + echo "Don't run Python coverage on pull requests." + exit + fi - ./configure - make -j4 # Need a venv that can parse covered code. @@ -106,6 +112,12 @@ matrix: - lcov - xvfb before_script: + - | + if [[ "$TRAVIS_PULL_REQUEST" != "false" ]] + then + echo "Don't run C coverage on pull requests." + exit + fi - ./configure script: - xvfb-run make -j4 coverage-report From webhook-mailer at python.org Tue Jun 16 11:49:52 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Tue, 16 Jun 2020 15:49:52 -0000 Subject: [Python-checkins] bpo-40958: Avoid buffer overflow in the parser when indexing the current line (GH-20875) Message-ID: https://github.com/python/cpython/commit/51c5896b6205911d29ac07f167ec7f3cf1cb600d commit: 51c5896b6205911d29ac07f167ec7f3cf1cb600d branch: master author: Pablo Galindo committer: GitHub date: 2020-06-16T16:49:43+01:00 summary: bpo-40958: Avoid buffer overflow in the parser when indexing the current line (GH-20875) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-15-01-20-44.bpo-40958.7O2Wh1.rst M Parser/pegen.c M Parser/pegen.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-15-01-20-44.bpo-40958.7O2Wh1.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-15-01-20-44.bpo-40958.7O2Wh1.rst new file mode 100644 index 0000000000000..8e36897948f9b --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-15-01-20-44.bpo-40958.7O2Wh1.rst @@ -0,0 +1,2 @@ +Fix a possible buffer overflow in the PEG parser when gathering information +for emitting syntax errors. Patch by Pablo Galindo. 
diff --git a/Parser/pegen.c b/Parser/pegen.c index 4cff7342edbbc..e153e924e9311 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -140,21 +140,18 @@ _create_dummy_identifier(Parser *p) } static inline Py_ssize_t -byte_offset_to_character_offset(PyObject *line, int col_offset) +byte_offset_to_character_offset(PyObject *line, Py_ssize_t col_offset) { const char *str = PyUnicode_AsUTF8(line); if (!str) { return 0; } + assert(col_offset >= 0 && (unsigned long)col_offset <= strlen(str)); PyObject *text = PyUnicode_DecodeUTF8(str, col_offset, "replace"); if (!text) { return 0; } Py_ssize_t size = PyUnicode_GET_LENGTH(text); - str = PyUnicode_AsUTF8(text); - if (str != NULL && (int)strlen(str) == col_offset) { - size = strlen(str); - } Py_DECREF(text); return size; } @@ -366,7 +363,7 @@ void * _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) { Token *t = p->known_err_token != NULL ? p->known_err_token : p->tokens[p->fill - 1]; - int col_offset; + Py_ssize_t col_offset; if (t->col_offset == -1) { col_offset = Py_SAFE_DOWNCAST(p->tok->cur - p->tok->buf, intptr_t, int); @@ -386,7 +383,7 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) void * _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, - int lineno, int col_offset, + Py_ssize_t lineno, Py_ssize_t col_offset, const char *errmsg, va_list va) { PyObject *value = NULL; @@ -406,16 +403,17 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, if (!error_line) { Py_ssize_t size = p->tok->inp - p->tok->buf; - if (size && p->tok->buf[size-1] == '\n') { - size--; - } error_line = PyUnicode_DecodeUTF8(p->tok->buf, size, "replace"); if (!error_line) { goto error; } } - Py_ssize_t col_number = byte_offset_to_character_offset(error_line, col_offset); + Py_ssize_t col_number = col_offset; + + if (p->tok->encoding != NULL) { + col_number = byte_offset_to_character_offset(error_line, col_offset); + } tmp = Py_BuildValue("(OiiN)", p->tok->filename, lineno, col_number, error_line); if (!tmp) { diff --git a/Parser/pegen.h b/Parser/pegen.h index 64cf0ec892913..c4ff8c9d51252 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -34,7 +34,7 @@ typedef struct _memo { typedef struct { int type; PyObject *bytes; - int lineno, col_offset, end_lineno, end_col_offset; + Py_ssize_t lineno, col_offset, end_lineno, end_col_offset; Memo *memo; } Token; @@ -132,7 +132,7 @@ void *_PyPegen_string_token(Parser *p); const char *_PyPegen_get_expr_name(expr_ty); void *_PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...); void *_PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, - int lineno, int col_offset, + Py_ssize_t lineno, Py_ssize_t col_offset, const char *errmsg, va_list va); void *_PyPegen_dummy_name(Parser *p, ...); From webhook-mailer at python.org Tue Jun 16 12:41:36 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Tue, 16 Jun 2020 16:41:36 -0000 Subject: [Python-checkins] bpo-1635741: Port _gdbm module to multiphase initialization (GH-20920) Message-ID: https://github.com/python/cpython/commit/c4862e333ab405dd5789b4061222db1982147de4 commit: c4862e333ab405dd5789b4061222db1982147de4 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-17T01:41:23+09:00 summary: bpo-1635741: Port _gdbm module to multiphase initialization (GH-20920) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-17-00-52-21.bpo-1635741.61iyYh.rst M Modules/_gdbmmodule.c M Modules/clinic/_gdbmmodule.c.h diff --git a/Misc/NEWS.d/next/Core and 
Builtins/2020-06-17-00-52-21.bpo-1635741.61iyYh.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-17-00-52-21.bpo-1635741.61iyYh.rst new file mode 100644 index 0000000000000..cffe70dd71eaf --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-17-00-52-21.bpo-1635741.61iyYh.rst @@ -0,0 +1 @@ +Port :mod:`_gdbm` to multiphase initialization. diff --git a/Modules/_gdbmmodule.c b/Modules/_gdbmmodule.c index dd4c6b16f745c..9e843acbaa6ba 100644 --- a/Modules/_gdbmmodule.c +++ b/Modules/_gdbmmodule.c @@ -1,5 +1,5 @@ -/* DBM module using dictionary interface */ +/* GDBM module using dictionary interface */ /* Author: Anthony Baxter, after dbmmodule.c */ /* Doc strings: Mitch Chapman */ @@ -16,11 +16,24 @@ extern const char * gdbm_strerror(gdbm_error); #endif +typedef struct { + PyTypeObject *gdbm_type; + PyObject *gdbm_error; +} _gdbm_state; + +static inline _gdbm_state* +get_gdbm_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (_gdbm_state *)state; +} + /*[clinic input] module _gdbm -class _gdbm.gdbm "dbmobject *" "&Dbmtype" +class _gdbm.gdbm "gdbmobject *" "&Gdbmtype" [clinic start generated code]*/ -/*[clinic end generated code: output=da39a3ee5e6b4b0d input=113927c6170729b2]*/ +/*[clinic end generated code: output=da39a3ee5e6b4b0d input=38ae71cedfc7172b]*/ PyDoc_STRVAR(gdbmmodule__doc__, "This module provides an interface to the GNU DBM (GDBM) library.\n\ @@ -38,20 +51,15 @@ typedef struct { PyObject_HEAD Py_ssize_t di_size; /* -1 means recompute */ GDBM_FILE di_dbm; -} dbmobject; - -static PyTypeObject Dbmtype; +} gdbmobject; #include "clinic/_gdbmmodule.c.h" -#define is_dbmobject(v) Py_IS_TYPE(v, &Dbmtype) -#define check_dbmobject_open(v) if ((v)->di_dbm == NULL) \ - { PyErr_SetString(DbmError, "GDBM object has already been closed"); \ - return NULL; } - - - -static PyObject *DbmError; +#define check_gdbmobject_open(v, err) \ + if ((v)->di_dbm == NULL) { \ + PyErr_SetString(err, "GDBM object has already been closed"); \ + return NULL; \ + } PyDoc_STRVAR(gdbm_object__doc__, "This object represents a GDBM database.\n\ @@ -64,20 +72,21 @@ GDBM objects also support additional operations such as firstkey,\n\ nextkey, reorganize, and sync."); static PyObject * -newdbmobject(const char *file, int flags, int mode) +newgdbmobject(_gdbm_state *state, const char *file, int flags, int mode) { - dbmobject *dp; - - dp = PyObject_New(dbmobject, &Dbmtype); - if (dp == NULL) + gdbmobject *dp = PyObject_New(gdbmobject, state->gdbm_type); + if (dp == NULL) { return NULL; + } dp->di_size = -1; errno = 0; if ((dp->di_dbm = gdbm_open((char *)file, 0, flags, mode, NULL)) == 0) { - if (errno != 0) - PyErr_SetFromErrnoWithFilename(DbmError, file); - else - PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno)); + if (errno != 0) { + PyErr_SetFromErrnoWithFilename(state->gdbm_error, file); + } + else { + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); + } Py_DECREF(dp); return NULL; } @@ -87,18 +96,22 @@ newdbmobject(const char *file, int flags, int mode) /* Methods */ static void -dbm_dealloc(dbmobject *dp) +gdbm_dealloc(gdbmobject *dp) { - if (dp->di_dbm) + if (dp->di_dbm) { gdbm_close(dp->di_dbm); - PyObject_Del(dp); + } + PyTypeObject *tp = Py_TYPE(dp); + tp->tp_free(dp); + Py_DECREF(tp); } static Py_ssize_t -dbm_length(dbmobject *dp) +gdbm_length(gdbmobject *dp) { + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (dp->di_dbm == NULL) { - PyErr_SetString(DbmError, "GDBM object has already been closed"); + 
PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); return -1; } if (dp->di_size < 0) { @@ -107,10 +120,10 @@ dbm_length(dbmobject *dp) gdbm_count_t count; if (gdbm_count(dp->di_dbm, &count) == -1) { if (errno != 0) { - PyErr_SetFromErrno(DbmError); + PyErr_SetFromErrno(state->gdbm_error); } else { - PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno)); + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); } return -1; } @@ -161,16 +174,17 @@ parse_datum(PyObject *o, datum *d, const char *failmsg) } static PyObject * -dbm_subscript(dbmobject *dp, PyObject *key) +gdbm_subscript(gdbmobject *dp, PyObject *key) { PyObject *v; datum drec, krec; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (!parse_datum(key, &krec, NULL)) { return NULL; } if (dp->di_dbm == NULL) { - PyErr_SetString(DbmError, + PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); return NULL; } @@ -195,12 +209,12 @@ Get the value for key, or default if not present. [clinic start generated code]*/ static PyObject * -_gdbm_gdbm_get_impl(dbmobject *self, PyObject *key, PyObject *default_value) -/*[clinic end generated code: output=19b7c585ad4f554a input=a9c20423f34c17b6]*/ +_gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value) +/*[clinic end generated code: output=92421838f3a852f4 input=a9c20423f34c17b6]*/ { PyObject *res; - res = dbm_subscript(self, key); + res = gdbm_subscript(self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); Py_INCREF(default_value); @@ -210,16 +224,17 @@ _gdbm_gdbm_get_impl(dbmobject *self, PyObject *key, PyObject *default_value) } static int -dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) +gdbm_ass_sub(gdbmobject *dp, PyObject *v, PyObject *w) { datum krec, drec; const char *failmsg = "gdbm mappings have bytes or string indices only"; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if (!parse_datum(v, &krec, failmsg)) { return -1; } if (dp->di_dbm == NULL) { - PyErr_SetString(DbmError, + PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); return -1; } @@ -230,7 +245,7 @@ dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) PyErr_SetObject(PyExc_KeyError, v); } else { - PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno)); + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); } return -1; } @@ -242,9 +257,9 @@ dbm_ass_sub(dbmobject *dp, PyObject *v, PyObject *w) errno = 0; if (gdbm_store(dp->di_dbm, krec, drec, GDBM_REPLACE) < 0) { if (errno != 0) - PyErr_SetFromErrno(DbmError); + PyErr_SetFromErrno(state->gdbm_error); else - PyErr_SetString(DbmError, + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); return -1; } @@ -263,28 +278,22 @@ Get value for key, or set it to default and return default if not present. 
[clinic start generated code]*/ static PyObject * -_gdbm_gdbm_setdefault_impl(dbmobject *self, PyObject *key, +_gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, PyObject *default_value) -/*[clinic end generated code: output=88760ee520329012 input=0db46b69e9680171]*/ +/*[clinic end generated code: output=f3246e880509f142 input=0db46b69e9680171]*/ { PyObject *res; - res = dbm_subscript(self, key); + res = gdbm_subscript(self, key); if (res == NULL && PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); - if (dbm_ass_sub(self, key, default_value) < 0) + if (gdbm_ass_sub(self, key, default_value) < 0) return NULL; - return dbm_subscript(self, key); + return gdbm_subscript(self, key); } return res; } -static PyMappingMethods dbm_as_mapping = { - (lenfunc)dbm_length, /*mp_length*/ - (binaryfunc)dbm_subscript, /*mp_subscript*/ - (objobjargproc)dbm_ass_sub, /*mp_ass_subscript*/ -}; - /*[clinic input] _gdbm.gdbm.close @@ -292,11 +301,12 @@ Close the database. [clinic start generated code]*/ static PyObject * -_gdbm_gdbm_close_impl(dbmobject *self) -/*[clinic end generated code: output=23512a594598b563 input=0a203447379b45fd]*/ +_gdbm_gdbm_close_impl(gdbmobject *self) +/*[clinic end generated code: output=f5abb4d6bb9e52d5 input=0a203447379b45fd]*/ { - if (self->di_dbm) + if (self->di_dbm) { gdbm_close(self->di_dbm); + } self->di_dbm = NULL; Py_RETURN_NONE; } @@ -305,22 +315,27 @@ _gdbm_gdbm_close_impl(dbmobject *self) /*[clinic input] _gdbm.gdbm.keys + cls: defining_class + Get a list of all keys in the database. [clinic start generated code]*/ static PyObject * -_gdbm_gdbm_keys_impl(dbmobject *self) -/*[clinic end generated code: output=cb4b1776c3645dcc input=1832ee0a3132cfaf]*/ +_gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=c24b824e81404755 input=1428b7c79703d7d5]*/ { PyObject *v, *item; datum key, nextkey; int err; - if (self == NULL || !is_dbmobject(self)) { + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + + if (self == NULL || !Py_IS_TYPE(self, state->gdbm_type)) { PyErr_BadInternalCall(); return NULL; } - check_dbmobject_open(self); + check_gdbmobject_open(self, state->gdbm_error); v = PyList_New(0); if (v == NULL) @@ -349,14 +364,15 @@ _gdbm_gdbm_keys_impl(dbmobject *self) } static int -dbm_contains(PyObject *self, PyObject *arg) +gdbm_contains(PyObject *self, PyObject *arg) { - dbmobject *dp = (dbmobject *)self; + gdbmobject *dp = (gdbmobject *)self; datum key; Py_ssize_t size; + _gdbm_state *state = PyType_GetModuleState(Py_TYPE(dp)); if ((dp)->di_dbm == NULL) { - PyErr_SetString(DbmError, + PyErr_SetString(state->gdbm_error, "GDBM object has already been closed"); return -1; } @@ -379,22 +395,11 @@ dbm_contains(PyObject *self, PyObject *arg) return gdbm_exists(dp->di_dbm, key); } -static PySequenceMethods dbm_as_sequence = { - 0, /* sq_length */ - 0, /* sq_concat */ - 0, /* sq_repeat */ - 0, /* sq_item */ - 0, /* sq_slice */ - 0, /* sq_ass_item */ - 0, /* sq_ass_slice */ - dbm_contains, /* sq_contains */ - 0, /* sq_inplace_concat */ - 0, /* sq_inplace_repeat */ -}; - /*[clinic input] _gdbm.gdbm.firstkey + cls: defining_class + Return the starting key for the traversal. It's possible to loop over every key in the database using this method @@ -403,13 +408,15 @@ hash values, and won't be sorted by the key values. 
[clinic start generated code]*/ static PyObject * -_gdbm_gdbm_firstkey_impl(dbmobject *self) -/*[clinic end generated code: output=9ff85628d84b65d2 input=0dbd6a335d69bba0]*/ +_gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=139275e9c8b60827 input=ed8782a029a5d299]*/ { PyObject *v; datum key; + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); - check_dbmobject_open(self); + check_gdbmobject_open(self, state->gdbm_error); key = gdbm_firstkey(self->di_dbm); if (key.dptr) { v = PyBytes_FromStringAndSize(key.dptr, key.dsize); @@ -424,6 +431,7 @@ _gdbm_gdbm_firstkey_impl(dbmobject *self) /*[clinic input] _gdbm.gdbm.nextkey + cls: defining_class key: str(accept={str, robuffer}, zeroes=True) / @@ -439,16 +447,18 @@ to create a list in memory that contains them all: [clinic start generated code]*/ static PyObject * -_gdbm_gdbm_nextkey_impl(dbmobject *self, const char *key, +_gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length) -/*[clinic end generated code: output=192ab892de6eb2f6 input=1f1606943614e36f]*/ +/*[clinic end generated code: output=204964441fdbaf02 input=fcf6a51a96ce0172]*/ { PyObject *v; datum dbm_key, nextkey; + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); dbm_key.dptr = (char *)key; dbm_key.dsize = key_length; - check_dbmobject_open(self); + check_gdbmobject_open(self, state->gdbm_error); nextkey = gdbm_nextkey(self->di_dbm, dbm_key); if (nextkey.dptr) { v = PyBytes_FromStringAndSize(nextkey.dptr, nextkey.dsize); @@ -463,6 +473,8 @@ _gdbm_gdbm_nextkey_impl(dbmobject *self, const char *key, /*[clinic input] _gdbm.gdbm.reorganize + cls: defining_class + Reorganize the database. If you have carried out a lot of deletions and would like to shrink @@ -473,16 +485,18 @@ kept and reused as new (key,value) pairs are added. [clinic start generated code]*/ static PyObject * -_gdbm_gdbm_reorganize_impl(dbmobject *self) -/*[clinic end generated code: output=38d9624df92e961d input=f6bea85bcfd40dd2]*/ +_gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=d77c69e8e3dd644a input=e1359faeef844e46]*/ { - check_dbmobject_open(self); + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_gdbmobject_open(self, state->gdbm_error); errno = 0; if (gdbm_reorganize(self->di_dbm) < 0) { if (errno != 0) - PyErr_SetFromErrno(DbmError); + PyErr_SetFromErrno(state->gdbm_error); else - PyErr_SetString(DbmError, gdbm_strerror(gdbm_errno)); + PyErr_SetString(state->gdbm_error, gdbm_strerror(gdbm_errno)); return NULL; } Py_RETURN_NONE; @@ -491,6 +505,8 @@ _gdbm_gdbm_reorganize_impl(dbmobject *self) /*[clinic input] _gdbm.gdbm.sync + cls: defining_class + Flush the database to the disk file. When the database has been opened in fast mode, this method forces @@ -498,29 +514,31 @@ any unwritten data to be written to the disk. 
[clinic start generated code]*/ static PyObject * -_gdbm_gdbm_sync_impl(dbmobject *self) -/*[clinic end generated code: output=488b15f47028f125 input=2a47d2c9e153ab8a]*/ +_gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls) +/*[clinic end generated code: output=bb680a2035c3f592 input=3d749235f79b6f2a]*/ { - check_dbmobject_open(self); + _gdbm_state *state = PyType_GetModuleState(cls); + assert(state != NULL); + check_gdbmobject_open(self, state->gdbm_error); gdbm_sync(self->di_dbm); Py_RETURN_NONE; } static PyObject * -dbm__enter__(PyObject *self, PyObject *args) +gdbm__enter__(PyObject *self, PyObject *args) { Py_INCREF(self); return self; } static PyObject * -dbm__exit__(PyObject *self, PyObject *args) +gdbm__exit__(PyObject *self, PyObject *args) { _Py_IDENTIFIER(close); return _PyObject_CallMethodIdNoArgs(self, &PyId_close); } -static PyMethodDef dbm_methods[] = { +static PyMethodDef gdbm_methods[] = { _GDBM_GDBM_CLOSE_METHODDEF _GDBM_GDBM_KEYS_METHODDEF _GDBM_GDBM_FIRSTKEY_METHODDEF @@ -529,46 +547,38 @@ static PyMethodDef dbm_methods[] = { _GDBM_GDBM_SYNC_METHODDEF _GDBM_GDBM_GET_METHODDEF _GDBM_GDBM_SETDEFAULT_METHODDEF - {"__enter__", dbm__enter__, METH_NOARGS, NULL}, - {"__exit__", dbm__exit__, METH_VARARGS, NULL}, + {"__enter__", gdbm__enter__, METH_NOARGS, NULL}, + {"__exit__", gdbm__exit__, METH_VARARGS, NULL}, {NULL, NULL} /* sentinel */ }; -static PyTypeObject Dbmtype = { - PyVarObject_HEAD_INIT(0, 0) - "_gdbm.gdbm", - sizeof(dbmobject), - 0, - (destructor)dbm_dealloc, /*tp_dealloc*/ - 0, /*tp_vectorcall_offset*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - 0, /*tp_as_async*/ - 0, /*tp_repr*/ - 0, /*tp_as_number*/ - &dbm_as_sequence, /*tp_as_sequence*/ - &dbm_as_mapping, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - 0, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT, /*tp_xxx4*/ - gdbm_object__doc__, /*tp_doc*/ - 0, /*tp_traverse*/ - 0, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - dbm_methods, /*tp_methods*/ +static PyType_Slot gdbmtype_spec_slots[] = { + {Py_tp_dealloc, gdbm_dealloc}, + {Py_tp_methods, gdbm_methods}, + {Py_sq_contains, gdbm_contains}, + {Py_mp_length, gdbm_length}, + {Py_mp_subscript, gdbm_subscript}, + {Py_mp_ass_subscript, gdbm_ass_sub}, + {Py_tp_doc, (char*)gdbm_object__doc__}, + {0, 0} +}; + +static PyType_Spec gdbmtype_spec = { + .name = "_gdbm.gdbm", + .basicsize = sizeof(gdbmobject), + // Calling PyType_GetModuleState() on a subclass is not safe. + // dbmtype_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = gdbmtype_spec_slots, }; /* ----------------------------------------------------------------- */ /*[clinic input] _gdbm.open as dbmopen + filename: unicode flags: str="r" mode: int(py_default="0o666") = 0o666 @@ -601,9 +611,11 @@ when the database has to be created. It defaults to octal 0o666. 
static PyObject * dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, int mode) -/*[clinic end generated code: output=9527750f5df90764 input=3be0b0875974b928]*/ +/*[clinic end generated code: output=9527750f5df90764 input=812b7d74399ceb0e]*/ { int iflags; + _gdbm_state *state = get_gdbm_state(module); + assert(state != NULL); switch (flags[0]) { case 'r': @@ -619,7 +631,7 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, iflags = GDBM_NEWDB; break; default: - PyErr_SetString(DbmError, + PyErr_SetString(state->gdbm_error, "First flag must be one of 'r', 'w', 'c' or 'n'"); return NULL; } @@ -644,7 +656,7 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, default: PyOS_snprintf(buf, sizeof(buf), "Flag '%c' is not supported.", *flags); - PyErr_SetString(DbmError, buf); + PyErr_SetString(state->gdbm_error, buf); return NULL; } } @@ -659,12 +671,12 @@ dbmopen_impl(PyObject *module, PyObject *filename, const char *flags, PyErr_SetString(PyExc_ValueError, "embedded null character"); return NULL; } - PyObject *self = newdbmobject(name, iflags, mode); + PyObject *self = newgdbmobject(state, name, iflags, mode); Py_DECREF(filenamebytes); return self; } -static const char dbmmodule_open_flags[] = "rwcn" +static const char gdbmmodule_open_flags[] = "rwcn" #ifdef GDBM_FAST "f" #endif @@ -676,48 +688,30 @@ static const char dbmmodule_open_flags[] = "rwcn" #endif ; -static PyMethodDef dbmmodule_methods[] = { +static PyMethodDef _gdbm_module_methods[] = { DBMOPEN_METHODDEF { 0, 0 }, }; - -static struct PyModuleDef _gdbmmodule = { - PyModuleDef_HEAD_INIT, - "_gdbm", - gdbmmodule__doc__, - -1, - dbmmodule_methods, - NULL, - NULL, - NULL, - NULL -}; - -PyMODINIT_FUNC -PyInit__gdbm(void) { - PyObject *m; - - if (PyType_Ready(&Dbmtype) < 0) - return NULL; - m = PyModule_Create(&_gdbmmodule); - if (m == NULL) { - return NULL; +static int +_gdbm_exec(PyObject *module) +{ + _gdbm_state *state = get_gdbm_state(module); + state->gdbm_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &gdbmtype_spec, NULL); + if (state->gdbm_type == NULL) { + return -1; } - - DbmError = PyErr_NewException("_gdbm.error", PyExc_OSError, NULL); - if (DbmError == NULL) { - goto error; + state->gdbm_error = PyErr_NewException("_gdbm.error", PyExc_OSError, NULL); + if (state->gdbm_error == NULL) { + return -1; } - Py_INCREF(DbmError); - if (PyModule_AddObject(m, "error", DbmError) < 0) { - Py_DECREF(DbmError); - goto error; + if (PyModule_AddType(module, (PyTypeObject *)state->gdbm_error) < 0) { + return -1; } - - if (PyModule_AddStringConstant(m, "open_flags", - dbmmodule_open_flags) < 0) { - goto error; + if (PyModule_AddStringConstant(module, "open_flags", + gdbmmodule_open_flags) < 0) { + return -1; } #if defined(GDBM_VERSION_MAJOR) && defined(GDBM_VERSION_MINOR) && \ @@ -725,17 +719,59 @@ PyInit__gdbm(void) { PyObject *obj = Py_BuildValue("iii", GDBM_VERSION_MAJOR, GDBM_VERSION_MINOR, GDBM_VERSION_PATCH); if (obj == NULL) { - goto error; + return -1; } - if (PyModule_AddObject(m, "_GDBM_VERSION", obj) < 0) { + if (PyModule_AddObject(module, "_GDBM_VERSION", obj) < 0) { Py_DECREF(obj); - goto error; + return -1; } #endif + return 0; +} - return m; +static int +_gdbm_module_traverse(PyObject *module, visitproc visit, void *arg) +{ + _gdbm_state *state = get_gdbm_state(module); + Py_VISIT(state->gdbm_error); + Py_VISIT(state->gdbm_type); + return 0; +} -error: - Py_DECREF(m); - return NULL; +static int +_gdbm_module_clear(PyObject *module) +{ + _gdbm_state *state = 
get_gdbm_state(module); + Py_CLEAR(state->gdbm_error); + Py_CLEAR(state->gdbm_type); + return 0; +} + +static void +_gdbm_module_free(void *module) +{ + _gdbm_module_clear((PyObject *)module); +} + +static PyModuleDef_Slot _gdbm_module_slots[] = { + {Py_mod_exec, _gdbm_exec}, + {0, NULL} +}; + +static struct PyModuleDef _gdbmmodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_gdbm", + .m_doc = gdbmmodule__doc__, + .m_size = sizeof(_gdbm_state), + .m_methods = _gdbm_module_methods, + .m_slots = _gdbm_module_slots, + .m_traverse = _gdbm_module_traverse, + .m_clear = _gdbm_module_clear, + .m_free = _gdbm_module_free, +}; + +PyMODINIT_FUNC +PyInit__gdbm(void) +{ + return PyModuleDef_Init(&_gdbmmodule); } diff --git a/Modules/clinic/_gdbmmodule.c.h b/Modules/clinic/_gdbmmodule.c.h index 195159104d995..ffd2179f36970 100644 --- a/Modules/clinic/_gdbmmodule.c.h +++ b/Modules/clinic/_gdbmmodule.c.h @@ -12,10 +12,10 @@ PyDoc_STRVAR(_gdbm_gdbm_get__doc__, {"get", (PyCFunction)(void(*)(void))_gdbm_gdbm_get, METH_FASTCALL, _gdbm_gdbm_get__doc__}, static PyObject * -_gdbm_gdbm_get_impl(dbmobject *self, PyObject *key, PyObject *default_value); +_gdbm_gdbm_get_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_get(dbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_get(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -46,11 +46,11 @@ PyDoc_STRVAR(_gdbm_gdbm_setdefault__doc__, {"setdefault", (PyCFunction)(void(*)(void))_gdbm_gdbm_setdefault, METH_FASTCALL, _gdbm_gdbm_setdefault__doc__}, static PyObject * -_gdbm_gdbm_setdefault_impl(dbmobject *self, PyObject *key, +_gdbm_gdbm_setdefault_impl(gdbmobject *self, PyObject *key, PyObject *default_value); static PyObject * -_gdbm_gdbm_setdefault(dbmobject *self, PyObject *const *args, Py_ssize_t nargs) +_gdbm_gdbm_setdefault(gdbmobject *self, PyObject *const *args, Py_ssize_t nargs) { PyObject *return_value = NULL; PyObject *key; @@ -81,10 +81,10 @@ PyDoc_STRVAR(_gdbm_gdbm_close__doc__, {"close", (PyCFunction)_gdbm_gdbm_close, METH_NOARGS, _gdbm_gdbm_close__doc__}, static PyObject * -_gdbm_gdbm_close_impl(dbmobject *self); +_gdbm_gdbm_close_impl(gdbmobject *self); static PyObject * -_gdbm_gdbm_close(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_close(gdbmobject *self, PyObject *Py_UNUSED(ignored)) { return _gdbm_gdbm_close_impl(self); } @@ -96,15 +96,26 @@ PyDoc_STRVAR(_gdbm_gdbm_keys__doc__, "Get a list of all keys in the database."); #define _GDBM_GDBM_KEYS_METHODDEF \ - {"keys", (PyCFunction)_gdbm_gdbm_keys, METH_NOARGS, _gdbm_gdbm_keys__doc__}, + {"keys", (PyCFunction)(void(*)(void))_gdbm_gdbm_keys, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_keys__doc__}, static PyObject * -_gdbm_gdbm_keys_impl(dbmobject *self); +_gdbm_gdbm_keys_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_keys(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_keys(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _gdbm_gdbm_keys_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":keys", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _gdbm_gdbm_keys_impl(self, cls); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, @@ -118,15 +129,26 @@ PyDoc_STRVAR(_gdbm_gdbm_firstkey__doc__, 
"hash values, and won\'t be sorted by the key values."); #define _GDBM_GDBM_FIRSTKEY_METHODDEF \ - {"firstkey", (PyCFunction)_gdbm_gdbm_firstkey, METH_NOARGS, _gdbm_gdbm_firstkey__doc__}, + {"firstkey", (PyCFunction)(void(*)(void))_gdbm_gdbm_firstkey, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_firstkey__doc__}, static PyObject * -_gdbm_gdbm_firstkey_impl(dbmobject *self); +_gdbm_gdbm_firstkey_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_firstkey(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_firstkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _gdbm_gdbm_firstkey_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":firstkey", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _gdbm_gdbm_firstkey_impl(self, cls); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, @@ -144,23 +166,26 @@ PyDoc_STRVAR(_gdbm_gdbm_nextkey__doc__, " k = db.nextkey(k)"); #define _GDBM_GDBM_NEXTKEY_METHODDEF \ - {"nextkey", (PyCFunction)_gdbm_gdbm_nextkey, METH_O, _gdbm_gdbm_nextkey__doc__}, + {"nextkey", (PyCFunction)(void(*)(void))_gdbm_gdbm_nextkey, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_nextkey__doc__}, static PyObject * -_gdbm_gdbm_nextkey_impl(dbmobject *self, const char *key, +_gdbm_gdbm_nextkey_impl(gdbmobject *self, PyTypeObject *cls, const char *key, Py_ssize_clean_t key_length); static PyObject * -_gdbm_gdbm_nextkey(dbmobject *self, PyObject *arg) +_gdbm_gdbm_nextkey(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { PyObject *return_value = NULL; + static const char * const _keywords[] = {"", NULL}; + static _PyArg_Parser _parser = {"s#:nextkey", _keywords, 0}; const char *key; Py_ssize_clean_t key_length; - if (!PyArg_Parse(arg, "s#:nextkey", &key, &key_length)) { + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, + &key, &key_length)) { goto exit; } - return_value = _gdbm_gdbm_nextkey_impl(self, key, key_length); + return_value = _gdbm_gdbm_nextkey_impl(self, cls, key, key_length); exit: return return_value; @@ -179,15 +204,26 @@ PyDoc_STRVAR(_gdbm_gdbm_reorganize__doc__, "kept and reused as new (key,value) pairs are added."); #define _GDBM_GDBM_REORGANIZE_METHODDEF \ - {"reorganize", (PyCFunction)_gdbm_gdbm_reorganize, METH_NOARGS, _gdbm_gdbm_reorganize__doc__}, + {"reorganize", (PyCFunction)(void(*)(void))_gdbm_gdbm_reorganize, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_reorganize__doc__}, static PyObject * -_gdbm_gdbm_reorganize_impl(dbmobject *self); +_gdbm_gdbm_reorganize_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_reorganize(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_reorganize(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _gdbm_gdbm_reorganize_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":reorganize", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _gdbm_gdbm_reorganize_impl(self, cls); + +exit: + return return_value; } PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, @@ -200,15 +236,26 @@ PyDoc_STRVAR(_gdbm_gdbm_sync__doc__, "any unwritten data to be written to the 
disk."); #define _GDBM_GDBM_SYNC_METHODDEF \ - {"sync", (PyCFunction)_gdbm_gdbm_sync, METH_NOARGS, _gdbm_gdbm_sync__doc__}, + {"sync", (PyCFunction)(void(*)(void))_gdbm_gdbm_sync, METH_METHOD|METH_FASTCALL|METH_KEYWORDS, _gdbm_gdbm_sync__doc__}, static PyObject * -_gdbm_gdbm_sync_impl(dbmobject *self); +_gdbm_gdbm_sync_impl(gdbmobject *self, PyTypeObject *cls); static PyObject * -_gdbm_gdbm_sync(dbmobject *self, PyObject *Py_UNUSED(ignored)) +_gdbm_gdbm_sync(gdbmobject *self, PyTypeObject *cls, PyObject *const *args, Py_ssize_t nargs, PyObject *kwnames) { - return _gdbm_gdbm_sync_impl(self); + PyObject *return_value = NULL; + static const char * const _keywords[] = { NULL}; + static _PyArg_Parser _parser = {":sync", _keywords, 0}; + + if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser + )) { + goto exit; + } + return_value = _gdbm_gdbm_sync_impl(self, cls); + +exit: + return return_value; } PyDoc_STRVAR(dbmopen__doc__, @@ -293,4 +340,4 @@ dbmopen(PyObject *module, PyObject *const *args, Py_ssize_t nargs) exit: return return_value; } -/*[clinic end generated code: output=c9d43f42677f4efb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=e84bc6ac82fcb6d4 input=a9049054013a1b77]*/ From webhook-mailer at python.org Tue Jun 16 20:24:48 2020 From: webhook-mailer at python.org (Eric Snow) Date: Wed, 17 Jun 2020 00:24:48 -0000 Subject: [Python-checkins] bpo-32604: Clean up test.support.interpreters. (gh-20926) Message-ID: https://github.com/python/cpython/commit/818f5b597ae93411cc44e404544247d436026a00 commit: 818f5b597ae93411cc44e404544247d436026a00 branch: master author: Eric Snow committer: GitHub date: 2020-06-16T18:24:40-06:00 summary: bpo-32604: Clean up test.support.interpreters. (gh-20926) There were some minor adjustments needed and a few tests were missing. https://bugs.python.org/issue32604 files: M Lib/test/support/interpreters.py M Lib/test/test__xxsubinterpreters.py M Lib/test/test_interpreters.py diff --git a/Lib/test/support/interpreters.py b/Lib/test/support/interpreters.py index 09508e1bbeca0..2935708f9df1a 100644 --- a/Lib/test/support/interpreters.py +++ b/Lib/test/support/interpreters.py @@ -1,5 +1,6 @@ """Subinterpreters High Level Module.""" +import time import _xxsubinterpreters as _interpreters # aliases: @@ -19,47 +20,51 @@ def create(*, isolated=True): - """ - Initialize a new (idle) Python interpreter. - """ + """Return a new (idle) Python interpreter.""" id = _interpreters.create(isolated=isolated) return Interpreter(id, isolated=isolated) def list_all(): - """ - Get all existing interpreters. - """ - return [Interpreter(id) for id in - _interpreters.list_all()] + """Return all existing interpreters.""" + return [Interpreter(id) for id in _interpreters.list_all()] def get_current(): - """ - Get the currently running interpreter. - """ + """Return the currently running interpreter.""" id = _interpreters.get_current() return Interpreter(id) def get_main(): - """ - Get the main interpreter. - """ + """Return the main interpreter.""" id = _interpreters.get_main() return Interpreter(id) class Interpreter: - """ - The Interpreter object represents - a single interpreter. 
- """ + """A single Python interpreter.""" def __init__(self, id, *, isolated=None): + if not isinstance(id, (int, _interpreters.InterpreterID)): + raise TypeError(f'id must be an int, got {id!r}') self._id = id self._isolated = isolated + def __repr__(self): + data = dict(id=int(self._id), isolated=self._isolated) + kwargs = (f'{k}={v!r}' for k, v in data.items()) + return f'{type(self).__name__}({", ".join(kwargs)})' + + def __hash__(self): + return hash(self._id) + + def __eq__(self, other): + if not isinstance(other, Interpreter): + return NotImplemented + else: + return other._id == self._id + @property def id(self): return self._id @@ -67,84 +72,98 @@ def id(self): @property def isolated(self): if self._isolated is None: + # XXX The low-level function has not been added yet. + # See bpo-.... self._isolated = _interpreters.is_isolated(self._id) return self._isolated def is_running(self): - """ - Return whether or not the identified - interpreter is running. - """ + """Return whether or not the identified interpreter is running.""" return _interpreters.is_running(self._id) def close(self): - """ - Finalize and destroy the interpreter. + """Finalize and destroy the interpreter. - Attempting to destroy the current - interpreter results in a RuntimeError. + Attempting to destroy the current interpreter results + in a RuntimeError. """ return _interpreters.destroy(self._id) def run(self, src_str, /, *, channels=None): - """ - Run the given source code in the interpreter. + """Run the given source code in the interpreter. + This blocks the current Python thread until done. """ - _interpreters.run_string(self._id, src_str) + _interpreters.run_string(self._id, src_str, channels) def create_channel(): - """ - Create a new channel for passing data between - interpreters. - """ + """Return (recv, send) for a new cross-interpreter channel. + The channel may be used to pass data safely between interpreters. + """ cid = _interpreters.channel_create() - return (RecvChannel(cid), SendChannel(cid)) + recv, send = RecvChannel(cid), SendChannel(cid) + return recv, send def list_all_channels(): - """ - Get all open channels. - """ + """Return a list of (recv, send) for all open channels.""" return [(RecvChannel(cid), SendChannel(cid)) for cid in _interpreters.channel_list_all()] +class _ChannelEnd: + """The base class for RecvChannel and SendChannel.""" + + def __init__(self, id): + if not isinstance(id, (int, _interpreters.ChannelID)): + raise TypeError(f'id must be an int, got {id!r}') + self._id = id + + def __repr__(self): + return f'{type(self).__name__}(id={int(self._id)})' + + def __hash__(self): + return hash(self._id) + + def __eq__(self, other): + if isinstance(self, RecvChannel): + if not isinstance(other, RecvChannel): + return NotImplemented + elif not isinstance(other, SendChannel): + return NotImplemented + return other._id == self._id + + @property + def id(self): + return self._id + + _NOT_SET = object() -class RecvChannel: - """ - The RecvChannel object represents - a receiving channel. - """ +class RecvChannel(_ChannelEnd): + """The receiving end of a cross-interpreter channel.""" - def __init__(self, id): - self._id = id + def recv(self, *, _sentinel=object(), _delay=10 / 1000): # 10 milliseconds + """Return the next object from the channel. - def recv(self, *, _delay=10 / 1000): # 10 milliseconds - """ - Get the next object from the channel, - and wait if none have been sent. - Associate the interpreter with the channel. 
+ This blocks until an object has been sent, if none have been + sent already. """ - import time - sentinel = object() - obj = _interpreters.channel_recv(self._id, sentinel) - while obj is sentinel: + obj = _interpreters.channel_recv(self._id, _sentinel) + while obj is _sentinel: time.sleep(_delay) - obj = _interpreters.channel_recv(self._id, sentinel) + obj = _interpreters.channel_recv(self._id, _sentinel) return obj def recv_nowait(self, default=_NOT_SET): - """ - Like recv(), but return the default - instead of waiting. + """Return the next object from the channel. - This function is blocked by a missing low-level - implementation of channel_recv_wait(). + If none have been sent then return the default if one + is provided or fail with ChannelEmptyError. Otherwise this + is the same as recv(). """ if default is _NOT_SET: return _interpreters.channel_recv(self._id) @@ -152,32 +171,27 @@ def recv_nowait(self, default=_NOT_SET): return _interpreters.channel_recv(self._id, default) -class SendChannel: - """ - The SendChannel object represents - a sending channel. - """ - - def __init__(self, id): - self._id = id +class SendChannel(_ChannelEnd): + """The sending end of a cross-interpreter channel.""" def send(self, obj): + """Send the object (i.e. its data) to the channel's receiving end. + + This blocks until the object is received. """ - Send the object (i.e. its data) to the receiving - end of the channel and wait. Associate the interpreter - with the channel. - """ - import time _interpreters.channel_send(self._id, obj) + # XXX We are missing a low-level channel_send_wait(). + # See bpo-32604 and gh-19829. + # Until that shows up we fake it: time.sleep(2) def send_nowait(self, obj): - """ - Like send(), but return False if not received. + """Send the object to the channel's receiving end. - This function is blocked by a missing low-level - implementation of channel_send_wait(). + If the object is immediately received then return True + (else False). Otherwise this is the same as send(). """ - - _interpreters.channel_send(self._id, obj) - return False + # XXX Note that at the moment channel_send() only ever returns + # None. This should be fixed when channel_send_wait() is added. + # See bpo-32604 and gh-19829. 
+ return _interpreters.channel_send(self._id, obj) diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 7aec021fb19a5..550a847616cdc 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -759,21 +759,9 @@ def test_still_running(self): class RunStringTests(TestBase): - SCRIPT = dedent(""" - with open('{}', 'w') as out: - out.write('{}') - """) - FILENAME = 'spam' - def setUp(self): super().setUp() self.id = interpreters.create() - self._fs = None - - def tearDown(self): - if self._fs is not None: - self._fs.close() - super().tearDown() def test_success(self): script, file = _captured_script('print("it worked!", end="")') diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py index 3451a4c8759d8..58258bb66af8a 100644 --- a/Lib/test/test_interpreters.py +++ b/Lib/test/test_interpreters.py @@ -31,10 +31,10 @@ def clean_up_interpreters(): pass # already destroyed -def _run_output(interp, request, shared=None): +def _run_output(interp, request, channels=None): script, rpipe = _captured_script(request) with rpipe: - interp.run(script) + interp.run(script, channels=channels) return rpipe.read() @@ -68,25 +68,22 @@ class CreateTests(TestBase): def test_in_main(self): interp = interpreters.create() - lst = interpreters.list_all() - self.assertEqual(interp.id, lst[1].id) + self.assertIsInstance(interp, interpreters.Interpreter) + self.assertIn(interp, interpreters.list_all()) def test_in_thread(self): lock = threading.Lock() - id = None - interp = interpreters.create() - lst = interpreters.list_all() + interp = None def f(): - nonlocal id - id = interp.id + nonlocal interp + interp = interpreters.create() lock.acquire() lock.release() - t = threading.Thread(target=f) with lock: t.start() t.join() - self.assertEqual(interp.id, lst[1].id) + self.assertIn(interp, interpreters.list_all()) def test_in_subinterpreter(self): main, = interpreters.list_all() @@ -94,11 +91,10 @@ def test_in_subinterpreter(self): out = _run_output(interp, dedent(""" from test.support import interpreters interp = interpreters.create() - print(interp) + print(interp.id) """)) - interp2 = out.strip() - - self.assertEqual(len(set(interpreters.list_all())), len({main, interp, interp2})) + interp2 = interpreters.Interpreter(int(out)) + self.assertEqual(interpreters.list_all(), [main, interp, interp2]) def test_after_destroy_all(self): before = set(interpreters.list_all()) @@ -112,7 +108,7 @@ def test_after_destroy_all(self): interp.close() # Finally, create another. interp = interpreters.create() - self.assertEqual(len(set(interpreters.list_all())), len(before | {interp})) + self.assertEqual(set(interpreters.list_all()), before | {interp}) def test_after_destroy_some(self): before = set(interpreters.list_all()) @@ -125,15 +121,15 @@ def test_after_destroy_some(self): interp2.close() # Finally, create another. 
interp = interpreters.create() - self.assertEqual(len(set(interpreters.list_all())), len(before | {interp3, interp})) + self.assertEqual(set(interpreters.list_all()), before | {interp3, interp}) class GetCurrentTests(TestBase): def test_main(self): - main_interp_id = _interpreters.get_main() - cur_interp_id = interpreters.get_current().id - self.assertEqual(cur_interp_id, main_interp_id) + main = interpreters.get_main() + current = interpreters.get_current() + self.assertEqual(current, main) def test_subinterpreter(self): main = _interpreters.get_main() @@ -141,10 +137,10 @@ def test_subinterpreter(self): out = _run_output(interp, dedent(""" from test.support import interpreters cur = interpreters.get_current() - print(cur) + print(cur.id) """)) - cur = out.strip() - self.assertNotEqual(cur, main) + current = interpreters.Interpreter(int(out)) + self.assertNotEqual(current, main) class ListAllTests(TestBase): @@ -177,26 +173,75 @@ def test_after_destroying(self): self.assertEqual(ids, [main.id, second.id]) -class TestInterpreterId(TestBase): +class TestInterpreterAttrs(TestBase): - def test_in_main(self): - main = interpreters.get_current() - self.assertEqual(0, main.id) + def test_id_type(self): + main = interpreters.get_main() + current = interpreters.get_current() + interp = interpreters.create() + self.assertIsInstance(main.id, _interpreters.InterpreterID) + self.assertIsInstance(current.id, _interpreters.InterpreterID) + self.assertIsInstance(interp.id, _interpreters.InterpreterID) - def test_with_custom_num(self): + def test_main_id(self): + main = interpreters.get_main() + self.assertEqual(main.id, 0) + + def test_custom_id(self): interp = interpreters.Interpreter(1) - self.assertEqual(1, interp.id) + self.assertEqual(interp.id, 1) + + with self.assertRaises(TypeError): + interpreters.Interpreter('1') - def test_for_readonly_property(self): + def test_id_readonly(self): interp = interpreters.Interpreter(1) with self.assertRaises(AttributeError): interp.id = 2 + @unittest.skip('not ready yet (see bpo-32604)') + def test_main_isolated(self): + main = interpreters.get_main() + self.assertFalse(main.isolated) + + @unittest.skip('not ready yet (see bpo-32604)') + def test_subinterpreter_isolated_default(self): + interp = interpreters.create() + self.assertFalse(interp.isolated) + + def test_subinterpreter_isolated_explicit(self): + interp1 = interpreters.create(isolated=True) + interp2 = interpreters.create(isolated=False) + self.assertTrue(interp1.isolated) + self.assertFalse(interp2.isolated) + + @unittest.skip('not ready yet (see bpo-32604)') + def test_custom_isolated_default(self): + interp = interpreters.Interpreter(1) + self.assertFalse(interp.isolated) + + def test_custom_isolated_explicit(self): + interp1 = interpreters.Interpreter(1, isolated=True) + interp2 = interpreters.Interpreter(1, isolated=False) + self.assertTrue(interp1.isolated) + self.assertFalse(interp2.isolated) + + def test_isolated_readonly(self): + interp = interpreters.Interpreter(1) + with self.assertRaises(AttributeError): + interp.isolated = True + + def test_equality(self): + interp1 = interpreters.create() + interp2 = interpreters.create() + self.assertEqual(interp1, interp1) + self.assertNotEqual(interp1, interp2) + class TestInterpreterIsRunning(TestBase): def test_main(self): - main = interpreters.get_current() + main = interpreters.get_main() self.assertTrue(main.is_running()) def test_subinterpreter(self): @@ -224,16 +269,29 @@ def test_already_destroyed(self): with self.assertRaises(RuntimeError): 
interp.is_running() + def test_does_not_exist(self): + interp = interpreters.Interpreter(1_000_000) + with self.assertRaises(RuntimeError): + interp.is_running() + + def test_bad_id(self): + interp = interpreters.Interpreter(-1) + with self.assertRaises(ValueError): + interp.is_running() -class TestInterpreterDestroy(TestBase): + +class TestInterpreterClose(TestBase): def test_basic(self): + main = interpreters.get_main() interp1 = interpreters.create() interp2 = interpreters.create() interp3 = interpreters.create() - self.assertEqual(4, len(interpreters.list_all())) + self.assertEqual(set(interpreters.list_all()), + {main, interp1, interp2, interp3}) interp2.close() - self.assertEqual(3, len(interpreters.list_all())) + self.assertEqual(set(interpreters.list_all()), + {main, interp1, interp3}) def test_all(self): before = set(interpreters.list_all()) @@ -241,10 +299,10 @@ def test_all(self): for _ in range(3): interp = interpreters.create() interps.add(interp) - self.assertEqual(len(set(interpreters.list_all())), len(before | interps)) + self.assertEqual(set(interpreters.list_all()), before | interps) for interp in interps: interp.close() - self.assertEqual(len(set(interpreters.list_all())), len(before)) + self.assertEqual(set(interpreters.list_all()), before) def test_main(self): main, = interpreters.list_all() @@ -265,32 +323,44 @@ def test_already_destroyed(self): with self.assertRaises(RuntimeError): interp.close() + def test_does_not_exist(self): + interp = interpreters.Interpreter(1_000_000) + with self.assertRaises(RuntimeError): + interp.close() + + def test_bad_id(self): + interp = interpreters.Interpreter(-1) + with self.assertRaises(ValueError): + interp.close() + def test_from_current(self): main, = interpreters.list_all() interp = interpreters.create() - script = dedent(f""" + out = _run_output(interp, dedent(f""" from test.support import interpreters + interp = interpreters.Interpreter({int(interp.id)}) try: - main = interpreters.get_current() - main.close() + interp.close() except RuntimeError: - pass - """) - - interp.run(script) - self.assertEqual(len(set(interpreters.list_all())), len({main, interp})) + print('failed') + """)) + self.assertEqual(out.strip(), 'failed') + self.assertEqual(set(interpreters.list_all()), {main, interp}) def test_from_sibling(self): main, = interpreters.list_all() interp1 = interpreters.create() - script = dedent(f""" + interp2 = interpreters.create() + self.assertEqual(set(interpreters.list_all()), + {main, interp1, interp2}) + interp1.run(dedent(f""" from test.support import interpreters - interp2 = interpreters.create() + interp2 = interpreters.Interpreter(int({interp2.id})) interp2.close() - """) - interp1.run(script) - - self.assertEqual(len(set(interpreters.list_all())), len({main, interp1})) + interp3 = interpreters.create() + interp3.close() + """)) + self.assertEqual(set(interpreters.list_all()), {main, interp1}) def test_from_other_thread(self): interp = interpreters.create() @@ -312,41 +382,21 @@ def test_still_running(self): class TestInterpreterRun(TestBase): - SCRIPT = dedent(""" - with open('{}', 'w') as out: - out.write('{}') - """) - FILENAME = 'spam' - - def setUp(self): - super().setUp() - self.interp = interpreters.create() - self._fs = None - - def tearDown(self): - if self._fs is not None: - self._fs.close() - super().tearDown() - - @property - def fs(self): - if self._fs is None: - self._fs = FSFixture(self) - return self._fs - def test_success(self): + interp = interpreters.create() script, file = 
_captured_script('print("it worked!", end="")') with file: - self.interp.run(script) + interp.run(script) out = file.read() self.assertEqual(out, 'it worked!') def test_in_thread(self): + interp = interpreters.create() script, file = _captured_script('print("it worked!", end="")') with file: def f(): - self.interp.run(script) + interp.run(script) t = threading.Thread(target=f) t.start() @@ -357,6 +407,7 @@ def f(): @unittest.skipUnless(hasattr(os, 'fork'), "test needs os.fork()") def test_fork(self): + interp = interpreters.create() import tempfile with tempfile.NamedTemporaryFile('w+') as file: file.write('') @@ -371,24 +422,39 @@ def test_fork(self): with open('{file.name}', 'w') as out: out.write('{expected}') """) - self.interp.run(script) + interp.run(script) file.seek(0) content = file.read() self.assertEqual(content, expected) def test_already_running(self): - with _running(self.interp): + interp = interpreters.create() + with _running(interp): with self.assertRaises(RuntimeError): - self.interp.run('print("spam")') + interp.run('print("spam")') + + def test_does_not_exist(self): + interp = interpreters.Interpreter(1_000_000) + with self.assertRaises(RuntimeError): + interp.run('print("spam")') + + def test_bad_id(self): + interp = interpreters.Interpreter(-1) + with self.assertRaises(ValueError): + interp.run('print("spam")') def test_bad_script(self): + interp = interpreters.create() with self.assertRaises(TypeError): - self.interp.run(10) + interp.run(10) def test_bytes_for_script(self): + interp = interpreters.create() with self.assertRaises(TypeError): - self.interp.run(b'print("spam")') + interp.run(b'print("spam")') + + # test_xxsubinterpreters covers the remaining Interpreter.run() behavior. class TestIsShareable(TestBase): @@ -405,8 +471,8 @@ def test_default_shareables(self): ] for obj in shareables: with self.subTest(obj): - self.assertTrue( - interpreters.is_shareable(obj)) + shareable = interpreters.is_shareable(obj) + self.assertTrue(shareable) def test_not_shareable(self): class Cheese: @@ -441,22 +507,71 @@ class SubBytes(bytes): interpreters.is_shareable(obj)) -class TestChannel(TestBase): +class TestChannels(TestBase): - def test_create_cid(self): + def test_create(self): r, s = interpreters.create_channel() self.assertIsInstance(r, interpreters.RecvChannel) self.assertIsInstance(s, interpreters.SendChannel) - def test_sequential_ids(self): - before = interpreters.list_all_channels() - channels1 = interpreters.create_channel() - channels2 = interpreters.create_channel() - channels3 = interpreters.create_channel() - after = interpreters.list_all_channels() + def test_list_all(self): + self.assertEqual(interpreters.list_all_channels(), []) + created = set() + for _ in range(3): + ch = interpreters.create_channel() + created.add(ch) + after = set(interpreters.list_all_channels()) + self.assertEqual(after, created) + + +class TestRecvChannelAttrs(TestBase): + + def test_id_type(self): + rch, _ = interpreters.create_channel() + self.assertIsInstance(rch.id, _interpreters.ChannelID) + + def test_custom_id(self): + rch = interpreters.RecvChannel(1) + self.assertEqual(rch.id, 1) + + with self.assertRaises(TypeError): + interpreters.RecvChannel('1') + + def test_id_readonly(self): + rch = interpreters.RecvChannel(1) + with self.assertRaises(AttributeError): + rch.id = 2 + + def test_equality(self): + ch1, _ = interpreters.create_channel() + ch2, _ = interpreters.create_channel() + self.assertEqual(ch1, ch1) + self.assertNotEqual(ch1, ch2) + + +class 
TestSendChannelAttrs(TestBase): + + def test_id_type(self): + _, sch = interpreters.create_channel() + self.assertIsInstance(sch.id, _interpreters.ChannelID) - self.assertEqual(len(set(after) - set(before)), - len({channels1, channels2, channels3})) + def test_custom_id(self): + sch = interpreters.SendChannel(1) + self.assertEqual(sch.id, 1) + + with self.assertRaises(TypeError): + interpreters.SendChannel('1') + + def test_id_readonly(self): + sch = interpreters.SendChannel(1) + with self.assertRaises(AttributeError): + sch.id = 2 + + def test_equality(self): + _, ch1 = interpreters.create_channel() + _, ch2 = interpreters.create_channel() + self.assertEqual(ch1, ch1) + self.assertNotEqual(ch1, ch2) class TestSendRecv(TestBase): @@ -464,7 +579,7 @@ class TestSendRecv(TestBase): def test_send_recv_main(self): r, s = interpreters.create_channel() orig = b'spam' - s.send(orig) + s.send_nowait(orig) obj = r.recv() self.assertEqual(obj, orig) @@ -472,16 +587,40 @@ def test_send_recv_main(self): def test_send_recv_same_interpreter(self): interp = interpreters.create() - out = _run_output(interp, dedent(""" + interp.run(dedent(""" from test.support import interpreters r, s = interpreters.create_channel() orig = b'spam' - s.send(orig) + s.send_nowait(orig) obj = r.recv() - assert obj is not orig - assert obj == orig + assert obj == orig, 'expected: obj == orig' + assert obj is not orig, 'expected: obj is not orig' """)) + @unittest.skip('broken (see BPO-...)') + def test_send_recv_different_interpreters(self): + r1, s1 = interpreters.create_channel() + r2, s2 = interpreters.create_channel() + orig1 = b'spam' + s1.send_nowait(orig1) + out = _run_output( + interpreters.create(), + dedent(f""" + obj1 = r.recv() + assert obj1 == b'spam', 'expected: obj1 == orig1' + # When going to another interpreter we get a copy. + assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1' + orig2 = b'eggs' + print(id(orig2)) + s.send_nowait(orig2) + """), + channels=dict(r=r1, s=s2), + ) + obj2 = r2.recv() + + self.assertEqual(obj2, b'eggs') + self.assertNotEqual(id(obj2), int(out)) + def test_send_recv_different_threads(self): r, s = interpreters.create_channel() @@ -496,40 +635,108 @@ def f(): t = threading.Thread(target=f) t.start() - s.send(b'spam') + orig = b'spam' + s.send(orig) t.join() obj = r.recv() - self.assertEqual(obj, b'spam') + self.assertEqual(obj, orig) + self.assertIsNot(obj, orig) def test_send_recv_nowait_main(self): r, s = interpreters.create_channel() orig = b'spam' - s.send(orig) + s.send_nowait(orig) obj = r.recv_nowait() self.assertEqual(obj, orig) self.assertIsNot(obj, orig) + def test_send_recv_nowait_main_with_default(self): + r, _ = interpreters.create_channel() + obj = r.recv_nowait(None) + + self.assertIsNone(obj) + def test_send_recv_nowait_same_interpreter(self): interp = interpreters.create() - out = _run_output(interp, dedent(""" + interp.run(dedent(""" from test.support import interpreters r, s = interpreters.create_channel() orig = b'spam' - s.send(orig) + s.send_nowait(orig) obj = r.recv_nowait() - assert obj is not orig - assert obj == orig + assert obj == orig, 'expected: obj == orig' + # When going back to the same interpreter we get the same object. 
+ assert obj is not orig, 'expected: obj is not orig' """)) - r, s = interpreters.create_channel() - - def f(): - while True: - try: - obj = r.recv_nowait() - break - except _interpreters.ChannelEmptyError: - time.sleep(0.1) - s.send(obj) + @unittest.skip('broken (see BPO-...)') + def test_send_recv_nowait_different_interpreters(self): + r1, s1 = interpreters.create_channel() + r2, s2 = interpreters.create_channel() + orig1 = b'spam' + s1.send_nowait(orig1) + out = _run_output( + interpreters.create(), + dedent(f""" + obj1 = r.recv_nowait() + assert obj1 == b'spam', 'expected: obj1 == orig1' + # When going to another interpreter we get a copy. + assert id(obj1) != {id(orig1)}, 'expected: obj1 is not orig1' + orig2 = b'eggs' + print(id(orig2)) + s.send_nowait(orig2) + """), + channels=dict(r=r1, s=s2), + ) + obj2 = r2.recv_nowait() + + self.assertEqual(obj2, b'eggs') + self.assertNotEqual(id(obj2), int(out)) + + def test_recv_channel_does_not_exist(self): + ch = interpreters.RecvChannel(1_000_000) + with self.assertRaises(interpreters.ChannelNotFoundError): + ch.recv() + + def test_send_channel_does_not_exist(self): + ch = interpreters.SendChannel(1_000_000) + with self.assertRaises(interpreters.ChannelNotFoundError): + ch.send(b'spam') + + def test_recv_nowait_channel_does_not_exist(self): + ch = interpreters.RecvChannel(1_000_000) + with self.assertRaises(interpreters.ChannelNotFoundError): + ch.recv_nowait() + + def test_send_nowait_channel_does_not_exist(self): + ch = interpreters.SendChannel(1_000_000) + with self.assertRaises(interpreters.ChannelNotFoundError): + ch.send_nowait(b'spam') + + def test_recv_nowait_empty(self): + ch, _ = interpreters.create_channel() + with self.assertRaises(interpreters.ChannelEmptyError): + ch.recv_nowait() + + def test_recv_nowait_default(self): + default = object() + rch, sch = interpreters.create_channel() + obj1 = rch.recv_nowait(default) + sch.send_nowait(None) + sch.send_nowait(1) + sch.send_nowait(b'spam') + sch.send_nowait(b'eggs') + obj2 = rch.recv_nowait(default) + obj3 = rch.recv_nowait(default) + obj4 = rch.recv_nowait() + obj5 = rch.recv_nowait(default) + obj6 = rch.recv_nowait(default) + + self.assertIs(obj1, default) + self.assertIs(obj2, None) + self.assertEqual(obj3, 1) + self.assertEqual(obj4, b'spam') + self.assertEqual(obj5, b'eggs') + self.assertIs(obj6, default) From webhook-mailer at python.org Wed Jun 17 07:09:50 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Wed, 17 Jun 2020 11:09:50 -0000 Subject: [Python-checkins] bpo-36346: Add Py_DEPRECATED to deprecated unicode APIs (GH-20878) Message-ID: https://github.com/python/cpython/commit/2c4928d37edc5e4aeec3c0b79fa3460b1ec9b60d commit: 2c4928d37edc5e4aeec3c0b79fa3460b1ec9b60d branch: master author: Inada Naoki committer: GitHub date: 2020-06-17T20:09:44+09:00 summary: bpo-36346: Add Py_DEPRECATED to deprecated unicode APIs (GH-20878) Co-authored-by: Kyle Stanley Co-authored-by: Victor Stinner files: A Misc/NEWS.d/next/C API/2020-06-17-11-24-00.bpo-36346.fTMr3S.rst M Doc/whatsnew/3.9.rst M Include/cpython/unicodeobject.h M Modules/_testcapimodule.c M Objects/unicodeobject.c M Python/getargs.c diff --git a/Doc/whatsnew/3.9.rst b/Doc/whatsnew/3.9.rst index 67a83bc958457..15fca8fa9d4c9 100644 --- a/Doc/whatsnew/3.9.rst +++ b/Doc/whatsnew/3.9.rst @@ -1097,6 +1097,12 @@ Porting to Python 3.9 internal C API (``pycore_gc.h``). (Contributed by Victor Stinner in :issue:`40241`.) 
+* The ``Py_UNICODE_COPY``, ``Py_UNICODE_FILL``, ``PyUnicode_WSTR_LENGTH``, + :c:func:`PyUnicode_FromUnicode`, :c:func:`PyUnicode_AsUnicode`, + ``_PyUnicode_AsUnicode``, and :c:func:`PyUnicode_AsUnicodeAndSize` are + marked as deprecated in C. They have been deprecated by :pep:`393` since + Python 3.3. + (Contributed by Inada Naoki in :issue:`36346`.) Removed ------- @@ -1165,3 +1171,8 @@ Removed * Remove ``_PyUnicode_ClearStaticStrings()`` function. (Contributed by Victor Stinner in :issue:`39465`.) + +* Remove ``Py_UNICODE_MATCH``. It has been deprecated by :pep:`393`, and + broken since Python 3.3. The :c:func:`PyUnicode_Tailmatch` function can be + used instead. + (Contributed by Inada Naoki in :issue:`36346`.) diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 3b49ce7759037..569bdb1e2a94b 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -46,13 +46,17 @@ Py_UNICODE_ISDIGIT(ch) || \ Py_UNICODE_ISNUMERIC(ch)) -#define Py_UNICODE_COPY(target, source, length) \ - memcpy((target), (source), (length)*sizeof(Py_UNICODE)) - -#define Py_UNICODE_FILL(target, value, length) \ - do {Py_ssize_t i_; Py_UNICODE *t_ = (target); Py_UNICODE v_ = (value);\ - for (i_ = 0; i_ < (length); i_++) t_[i_] = v_;\ - } while (0) +Py_DEPRECATED(3.3) static inline void +Py_UNICODE_COPY(Py_UNICODE *target, const Py_UNICODE *source, Py_ssize_t length) { + memcpy(target, source, length * sizeof(Py_UNICODE)); +} + +Py_DEPRECATED(3.3) static inline void +Py_UNICODE_FILL(Py_UNICODE *target, Py_UNICODE value, Py_ssize_t length) { + for (Py_ssize_t i = 0; i < length; i++) { + target[i] = value; + } +} /* macros to work with surrogates */ #define Py_UNICODE_IS_SURROGATE(ch) (0xD800 <= (ch) && (ch) <= 0xDFFF) @@ -67,14 +71,6 @@ /* low surrogate = bottom 10 bits added to DC00 */ #define Py_UNICODE_LOW_SURROGATE(ch) (0xDC00 + ((ch) & 0x3FF)) -/* Check if substring matches at given offset. The offset must be - valid, and the substring must not be empty. */ - -#define Py_UNICODE_MATCH(string, offset, substring) \ - ((*((string)->wstr + (offset)) == *((substring)->wstr)) && \ - ((*((string)->wstr + (offset) + (substring)->wstr_length-1) == *((substring)->wstr + (substring)->wstr_length-1))) && \ - !memcmp((string)->wstr + (offset), (substring)->wstr, (substring)->wstr_length*sizeof(Py_UNICODE))) - /* --- Unicode Type ------------------------------------------------------- */ /* ASCII-only strings created through PyUnicode_New use the PyASCIIObject @@ -247,10 +243,6 @@ PyAPI_FUNC(int) _PyUnicode_CheckConsistency( int check_content); /* Fast access macros */ -#define PyUnicode_WSTR_LENGTH(op) \ - (PyUnicode_IS_COMPACT_ASCII(op) ? \ - ((PyASCIIObject*)op)->length : \ - ((PyCompactUnicodeObject*)op)->wstr_length) /* Returns the deprecated Py_UNICODE representation's size in code units (this includes surrogate pairs as 2 units). @@ -445,6 +437,14 @@ enum PyUnicode_Kind { (0xffffU) : \ (0x10ffffU))))) +Py_DEPRECATED(3.3) +static inline Py_ssize_t _PyUnicode_get_wstr_length(PyObject *op) { + return PyUnicode_IS_COMPACT_ASCII(op) ? + ((PyASCIIObject*)op)->length : + ((PyCompactUnicodeObject*)op)->wstr_length; +} +#define PyUnicode_WSTR_LENGTH(op) _PyUnicode_get_wstr_length((PyObject*)op) + /* === Public API ========================================================= */ /* --- Plain Py_UNICODE --------------------------------------------------- */ @@ -543,7 +543,7 @@ PyAPI_FUNC(void) _PyUnicode_FastFill( only allowed if u was set to NULL. 
The buffer is copied into the new object. */ -/* Py_DEPRECATED(3.3) */ PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode( +Py_DEPRECATED(3.3) PyAPI_FUNC(PyObject*) PyUnicode_FromUnicode( const Py_UNICODE *u, /* Unicode buffer */ Py_ssize_t size /* size of buffer */ ); @@ -572,13 +572,13 @@ PyAPI_FUNC(Py_UCS4) _PyUnicode_FindMaxChar ( Py_UNICODE buffer. If the wchar_t/Py_UNICODE representation is not yet available, this function will calculate it. */ -/* Py_DEPRECATED(3.3) */ PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode( +Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicode( PyObject *unicode /* Unicode object */ ); /* Similar to PyUnicode_AsUnicode(), but raises a ValueError if the string contains null characters. */ -PyAPI_FUNC(const Py_UNICODE *) _PyUnicode_AsUnicode( +Py_DEPRECATED(3.3) PyAPI_FUNC(const Py_UNICODE *) _PyUnicode_AsUnicode( PyObject *unicode /* Unicode object */ ); @@ -587,7 +587,7 @@ PyAPI_FUNC(const Py_UNICODE *) _PyUnicode_AsUnicode( If the wchar_t/Py_UNICODE representation is not yet available, this function will calculate it. */ -/* Py_DEPRECATED(3.3) */ PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicodeAndSize( +Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicodeAndSize( PyObject *unicode, /* Unicode object */ Py_ssize_t *size /* location where to save the length */ ); diff --git a/Misc/NEWS.d/next/C API/2020-06-17-11-24-00.bpo-36346.fTMr3S.rst b/Misc/NEWS.d/next/C API/2020-06-17-11-24-00.bpo-36346.fTMr3S.rst new file mode 100644 index 0000000000000..902a0e60727e6 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-17-11-24-00.bpo-36346.fTMr3S.rst @@ -0,0 +1,4 @@ +Mark ``Py_UNICODE_COPY``, ``Py_UNICODE_FILL``, ``PyUnicode_WSTR_LENGTH``, +``PyUnicode_FromUnicode``, ``PyUnicode_AsUnicode``, ``_PyUnicode_AsUnicode``, +and ``PyUnicode_AsUnicodeAndSize`` as deprecated in C. Remove ``Py_UNICODE_MATCH`` +which was deprecated and broken since Python 3.3. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index e0457ae5dfa55..5302641a9a37e 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -1668,6 +1668,10 @@ parse_tuple_and_keywords(PyObject *self, PyObject *args) static volatile int x; +/* Ignore use of deprecated APIs */ +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + /* Test the u and u# codes for PyArg_ParseTuple. May leak memory in case of an error. */ @@ -1844,6 +1848,7 @@ test_widechar(PyObject *self, PyObject *Py_UNUSED(ignored)) Py_RETURN_NONE; } +_Py_COMP_DIAG_POP static PyObject * unicode_aswidechar(PyObject *self, PyObject *args) @@ -2064,6 +2069,10 @@ unicode_transformdecimaltoascii(PyObject *self, PyObject *args) return PyUnicode_TransformDecimalToASCII(unicode, length); } +/* Ignore use of deprecated APIs */ +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + static PyObject * unicode_legacy_string(PyObject *self, PyObject *args) { @@ -2086,6 +2095,7 @@ unicode_legacy_string(PyObject *self, PyObject *args) return u; } +_Py_COMP_DIAG_POP static PyObject * getargs_w_star(PyObject *self, PyObject *args) diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index c75eb077e0c80..1433848c81f8e 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -120,6 +120,13 @@ extern "C" { _PyUnicode_UTF8_LENGTH(op)) #define _PyUnicode_WSTR(op) \ (((PyASCIIObject*)(op))->wstr) + +/* Don't use deprecated macro of unicodeobject.h */ +#undef PyUnicode_WSTR_LENGTH +#define PyUnicode_WSTR_LENGTH(op) \ + (PyUnicode_IS_COMPACT_ASCII(op) ? 
\ + ((PyASCIIObject*)op)->length : \ + ((PyCompactUnicodeObject*)op)->wstr_length) #define _PyUnicode_WSTR_LENGTH(op) \ (((PyCompactUnicodeObject*)(op))->wstr_length) #define _PyUnicode_LENGTH(op) \ @@ -970,11 +977,14 @@ ensure_unicode(PyObject *obj) #include "stringlib/find_max_char.h" #include "stringlib/undef.h" +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS #include "stringlib/unicodedefs.h" #include "stringlib/fastsearch.h" #include "stringlib/count.h" #include "stringlib/find.h" #include "stringlib/undef.h" +_Py_COMP_DIAG_POP /* --- Unicode Object ----------------------------------------------------- */ @@ -4097,6 +4107,11 @@ PyUnicode_AsUnicodeAndSize(PyObject *unicode, Py_ssize_t *size) return w; } +/* Deprecated APIs */ + +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + Py_UNICODE * PyUnicode_AsUnicode(PyObject *unicode) { @@ -4135,6 +4150,8 @@ PyUnicode_GetSize(PyObject *unicode) return -1; } +_Py_COMP_DIAG_POP + Py_ssize_t PyUnicode_GetLength(PyObject *unicode) { @@ -12364,6 +12381,8 @@ PyUnicode_IsIdentifier(PyObject *self) return len && i == len; } else { +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS Py_ssize_t i = 0, len = PyUnicode_GET_SIZE(self); if (len == 0) { /* an empty string is not a valid identifier */ @@ -12401,6 +12420,7 @@ PyUnicode_IsIdentifier(PyObject *self) } } return 1; +_Py_COMP_DIAG_POP } } @@ -15955,7 +15975,10 @@ PyUnicode_AsUnicodeCopy(PyObject *unicode) PyErr_BadArgument(); return NULL; } +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS u = PyUnicode_AsUnicodeAndSize(unicode, &len); +_Py_COMP_DIAG_POP if (u == NULL) return NULL; /* Ensure we won't overflow the size. */ diff --git a/Python/getargs.c b/Python/getargs.c index d2dba49966d47..cf0cc0783687a 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -1027,6 +1027,9 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, case 'u': /* raw unicode buffer (Py_UNICODE *) */ case 'Z': /* raw unicode buffer or None */ { + // TODO: Raise DeprecationWarning +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS Py_UNICODE **p = va_arg(*p_va, Py_UNICODE **); if (*format == '#') { @@ -1066,6 +1069,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, arg, msgbuf, bufsize); } break; +_Py_COMP_DIAG_POP } case 'e': {/* encoded string */ From webhook-mailer at python.org Wed Jun 17 08:23:25 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 12:23:25 -0000 Subject: [Python-checkins] bpo-35059: Enhance _PyObject_GC_TRACK() macros (GH-20931) Message-ID: https://github.com/python/cpython/commit/07923f32b16ba39165a58a5f47e807ca04ae17aa commit: 07923f32b16ba39165a58a5f47e807ca04ae17aa branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T14:23:04+02:00 summary: bpo-35059: Enhance _PyObject_GC_TRACK() macros (GH-20931) * Rename _PyObject_GC_TRACK_impl() to _PyObject_GC_TRACK() * Rename _PyObject_GC_UNTRACK_impl() to _PyObject_GC_UNTRACK() * Omit filename and lineno parameters if NDEBUG is defined. files: M Include/internal/pycore_object.h diff --git a/Include/internal/pycore_object.h b/Include/internal/pycore_object.h index 9740717b8a397..edd0031c3eff8 100644 --- a/Include/internal/pycore_object.h +++ b/Include/internal/pycore_object.h @@ -53,6 +53,8 @@ _PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size) /* Tell the GC to track this object. + * + * The object must not be tracked by the GC. 
* * NB: While the object is tracked by the collector, it must be safe to call the * ob_traverse method. @@ -61,20 +63,24 @@ _PyObject_InitVar(PyVarObject *op, PyTypeObject *typeobj, Py_ssize_t size) * because it's not object header. So we don't use _PyGCHead_PREV() and * _PyGCHead_SET_PREV() for it to avoid unnecessary bitwise operations. * - * The PyObject_GC_Track() function is the public version of this macro. + * See also the public PyObject_GC_Track() function. */ -static inline void _PyObject_GC_TRACK_impl(const char *filename, int lineno, - PyObject *op) +static inline void _PyObject_GC_TRACK( +// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined +#ifndef NDEBUG + const char *filename, int lineno, +#endif + PyObject *op) { _PyObject_ASSERT_FROM(op, !_PyObject_GC_IS_TRACKED(op), "object already tracked by the garbage collector", - filename, lineno, "_PyObject_GC_TRACK"); + filename, lineno, __func__); PyGC_Head *gc = _Py_AS_GC(op); _PyObject_ASSERT_FROM(op, (gc->_gc_prev & _PyGC_PREV_MASK_COLLECTING) == 0, "object is in generation which is garbage collected", - filename, lineno, "_PyObject_GC_TRACK"); + filename, lineno, __func__); PyThreadState *tstate = _PyThreadState_GET(); PyGC_Head *generation0 = tstate->interp->gc.generation0; @@ -85,9 +91,6 @@ static inline void _PyObject_GC_TRACK_impl(const char *filename, int lineno, generation0->_gc_prev = (uintptr_t)gc; } -#define _PyObject_GC_TRACK(op) \ - _PyObject_GC_TRACK_impl(__FILE__, __LINE__, _PyObject_CAST(op)) - /* Tell the GC to stop tracking this object. * * Internal note: This may be called while GC. So _PyGC_PREV_MASK_COLLECTING @@ -95,14 +98,19 @@ static inline void _PyObject_GC_TRACK_impl(const char *filename, int lineno, * * The object must be tracked by the GC. * - * The PyObject_GC_UnTrack() function is the public version of this macro. + * See also the public PyObject_GC_UnTrack() which accept an object which is + * not tracked. */ -static inline void _PyObject_GC_UNTRACK_impl(const char *filename, int lineno, - PyObject *op) +static inline void _PyObject_GC_UNTRACK( +// The preprocessor removes _PyObject_ASSERT_FROM() calls if NDEBUG is defined +#ifndef NDEBUG + const char *filename, int lineno, +#endif + PyObject *op) { _PyObject_ASSERT_FROM(op, _PyObject_GC_IS_TRACKED(op), "object not tracked by the garbage collector", - filename, lineno, "_PyObject_GC_UNTRACK"); + filename, lineno, __func__); PyGC_Head *gc = _Py_AS_GC(op); PyGC_Head *prev = _PyGCHead_PREV(gc); @@ -113,8 +121,20 @@ static inline void _PyObject_GC_UNTRACK_impl(const char *filename, int lineno, gc->_gc_prev &= _PyGC_PREV_MASK_FINALIZED; } -#define _PyObject_GC_UNTRACK(op) \ - _PyObject_GC_UNTRACK_impl(__FILE__, __LINE__, _PyObject_CAST(op)) +// Macros to accept any type for the parameter, and to automatically pass +// the filename and the filename (if NDEBUG is not defined) where the macro +// is called. 
+#ifdef NDEBUG +# define _PyObject_GC_TRACK(op) \ + _PyObject_GC_TRACK(_PyObject_CAST(op)) +# define _PyObject_GC_UNTRACK(op) \ + _PyObject_GC_UNTRACK(_PyObject_CAST(op)) +#else +# define _PyObject_GC_TRACK(op) \ + _PyObject_GC_TRACK(__FILE__, __LINE__, _PyObject_CAST(op)) +# define _PyObject_GC_UNTRACK(op) \ + _PyObject_GC_UNTRACK(__FILE__, __LINE__, _PyObject_CAST(op)) +#endif #ifdef Py_REF_DEBUG extern void _PyDebug_PrintTotalRefs(void); From webhook-mailer at python.org Wed Jun 17 10:43:10 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Wed, 17 Jun 2020 14:43:10 -0000 Subject: [Python-checkins] bpo-36346: Make unicodeobject.h C89 compatible (GH-20934) Message-ID: https://github.com/python/cpython/commit/8e34e92caa73259620dd242b92d26edd0949b4ba commit: 8e34e92caa73259620dd242b92d26edd0949b4ba branch: master author: Inada Naoki committer: GitHub date: 2020-06-17T23:43:01+09:00 summary: bpo-36346: Make unicodeobject.h C89 compatible (GH-20934) files: M Include/cpython/unicodeobject.h diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 569bdb1e2a94b..7e53ccc9e63f0 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -53,7 +53,8 @@ Py_UNICODE_COPY(Py_UNICODE *target, const Py_UNICODE *source, Py_ssize_t length) Py_DEPRECATED(3.3) static inline void Py_UNICODE_FILL(Py_UNICODE *target, Py_UNICODE value, Py_ssize_t length) { - for (Py_ssize_t i = 0; i < length; i++) { + Py_ssize_t i; + for (i = 0; i < length; i++) { target[i] = value; } } From webhook-mailer at python.org Wed Jun 17 12:07:29 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 16:07:29 -0000 Subject: [Python-checkins] bpo-41003: Fix test_copyreg when numpy is installed (GH-20935) Message-ID: https://github.com/python/cpython/commit/8362893e3fe083df2ec8bb94c28b1a78383eadbf commit: 8362893e3fe083df2ec8bb94c28b1a78383eadbf branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T18:07:13+02:00 summary: bpo-41003: Fix test_copyreg when numpy is installed (GH-20935) Fix test_copyreg when numpy is installed: test.pickletester now saves/restores warnings.filters when importing numpy, to ignore filters installed by numpy. Add the save_restore_warnings_filters() function to the test.support.warnings_helper module. files: A Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst M Lib/distutils/tests/__init__.py M Lib/test/pickletester.py M Lib/test/support/warnings_helper.py diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 5d2e69e3e6a8f..16d011fd9ee6e 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,26 +15,25 @@ import os import sys import unittest -import warnings from test.support import run_unittest +from test.support.warnings_helper import save_restore_warnings_filters here = os.path.dirname(__file__) or os.curdir def test_suite(): - old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): modname = "distutils.tests." + fn[:-3] - __import__(modname) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources + # which adds a warnings filter. + with save_restore_warnings_filters(): + __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) - # bpo-40055: Save/restore warnings filters to leave them unchanged. 
- # Importing tests imports docutils which imports pkg_resources which adds a - # warnings filter. - warnings.filters[:] = old_filters return suite diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index ca566a28d60a9..a34505aab51c1 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -21,20 +21,26 @@ except ImportError: _testbuffer = None -try: - import numpy as np -except ImportError: - np = None - from test import support from test.support import ( TestFailed, TESTFN, run_with_locale, no_tracing, _2G, _4G, bigmemtest, forget, ) from test.support import threading_helper +from test.support.warnings_helper import save_restore_warnings_filters from pickle import bytes_types + +# bpo-41003: Save/restore warnings filters to leave them unchanged. +# Ignore filters installed by numpy. +try: + with save_restore_warnings_filters(): + import numpy as np +except ImportError: + np = None + + requires_32b = unittest.skipUnless(sys.maxsize < 2**32, "test is only meaningful on 32-bit builds") diff --git a/Lib/test/support/warnings_helper.py b/Lib/test/support/warnings_helper.py index c9f9045405b80..de23e6b452b28 100644 --- a/Lib/test/support/warnings_helper.py +++ b/Lib/test/support/warnings_helper.py @@ -178,3 +178,12 @@ def _filterwarnings(filters, quiet=False): if missing: raise AssertionError("filter (%r, %s) did not catch any warning" % missing[0]) + + + at contextlib.contextmanager +def save_restore_warnings_filters(): + old_filters = warnings.filters[:] + try: + yield + finally: + warnings.filters[:] = old_filters diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst b/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst new file mode 100644 index 0000000000000..6f908d99feaf7 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst @@ -0,0 +1,3 @@ +Fix ``test_copyreg`` when ``numpy`` is installed: ``test.pickletester`` now +saves/restores warnings filters when importing ``numpy``, to ignore filters +installed by ``numpy``. From webhook-mailer at python.org Wed Jun 17 13:09:19 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Wed, 17 Jun 2020 17:09:19 -0000 Subject: [Python-checkins] bpo-41009: fix requires_OS_version() class decorator (GH-20942) Message-ID: https://github.com/python/cpython/commit/bb6ec14479f18c32e71e43f2785f177aa17aabbd commit: bb6ec14479f18c32e71e43f2785f177aa17aabbd branch: master author: Christian Heimes committer: GitHub date: 2020-06-17T10:09:10-07:00 summary: bpo-41009: fix requires_OS_version() class decorator (GH-20942) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: A Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 498da6415080f..da63d9281b107 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -299,26 +299,25 @@ def _requires_unix_version(sysname, min_version): For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if the FreeBSD version is less than 7.2. 
""" - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kw): - import platform - if platform.system() == sysname: - version_txt = platform.release().split('-', 1)[0] - try: - version = tuple(map(int, version_txt.split('.'))) - except ValueError: - pass - else: - if version < min_version: - min_version_txt = '.'.join(map(str, min_version)) - raise unittest.SkipTest( - "%s version %s or higher required, not %s" - % (sysname, min_version_txt, version_txt)) - return func(*args, **kw) - wrapper.min_version = min_version - return wrapper - return decorator + import platform + min_version_txt = '.'.join(map(str, min_version)) + version_txt = platform.release().split('-', 1)[0] + if platform.system() == sysname: + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + skip = False + else: + skip = version < min_version + else: + skip = False + + return unittest.skipIf( + skip, + f"{sysname} version {min_version_txt} or higher required, not " + f"{version_txt}" + ) + def requires_freebsd_version(*min_version): """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst new file mode 100644 index 0000000000000..1208c119a3556 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst @@ -0,0 +1,2 @@ +Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as +class decorator. From webhook-mailer at python.org Wed Jun 17 13:09:58 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 17:09:58 -0000 Subject: [Python-checkins] bpo-41003: Fix test_copyreg when numpy is installed (GH-20935) (GH-20945) (GH-20946) Message-ID: https://github.com/python/cpython/commit/3d974b2fc681ddd0ec722cf631008d5941da52b8 commit: 3d974b2fc681ddd0ec722cf631008d5941da52b8 branch: 3.8 author: Victor Stinner committer: GitHub date: 2020-06-17T19:09:49+02:00 summary: bpo-41003: Fix test_copyreg when numpy is installed (GH-20935) (GH-20945) (GH-20946) Fix test_copyreg when numpy is installed: test.pickletester now saves/restores warnings.filters when importing numpy, to ignore filters installed by numpy. Add the save_restore_warnings_filters() function to the test.support.warnings_helper module. (cherry picked from commit 8362893e3fe083df2ec8bb94c28b1a78383eadbf) (cherry picked from commit b39d41ba1b77f7bc51c4d6f6d0e336693192cb3a) files: A Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst M Lib/distutils/tests/__init__.py M Lib/test/pickletester.py M Lib/test/support/__init__.py diff --git a/Lib/distutils/tests/__init__.py b/Lib/distutils/tests/__init__.py index 5d2e69e3e6a8f..68037216c7d06 100644 --- a/Lib/distutils/tests/__init__.py +++ b/Lib/distutils/tests/__init__.py @@ -15,26 +15,24 @@ import os import sys import unittest -import warnings -from test.support import run_unittest +from test.support import run_unittest, save_restore_warnings_filters here = os.path.dirname(__file__) or os.curdir def test_suite(): - old_filters = warnings.filters[:] suite = unittest.TestSuite() for fn in os.listdir(here): if fn.startswith("test") and fn.endswith(".py"): modname = "distutils.tests." + fn[:-3] - __import__(modname) + # bpo-40055: Save/restore warnings filters to leave them unchanged. + # Importing tests imports docutils which imports pkg_resources + # which adds a warnings filter. 
+ with save_restore_warnings_filters(): + __import__(modname) module = sys.modules[modname] suite.addTest(module.test_suite()) - # bpo-40055: Save/restore warnings filters to leave them unchanged. - # Importing tests imports docutils which imports pkg_resources which adds a - # warnings filter. - warnings.filters[:] = old_filters return suite diff --git a/Lib/test/pickletester.py b/Lib/test/pickletester.py index 7c8383f3eaac9..9401043d78d18 100644 --- a/Lib/test/pickletester.py +++ b/Lib/test/pickletester.py @@ -21,19 +21,25 @@ except ImportError: _testbuffer = None -try: - import numpy as np -except ImportError: - np = None - from test import support from test.support import ( TestFailed, TESTFN, run_with_locale, no_tracing, _2G, _4G, bigmemtest, reap_threads, forget, + save_restore_warnings_filters ) from pickle import bytes_types + +# bpo-41003: Save/restore warnings filters to leave them unchanged. +# Ignore filters installed by numpy. +try: + with save_restore_warnings_filters(): + import numpy as np +except ImportError: + np = None + + requires_32b = unittest.skipUnless(sys.maxsize < 2**32, "test is only meaningful on 32-bit builds") diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 08d53ec7064b4..0906e7adbae9b 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -3368,3 +3368,12 @@ def __exit__(self, *exc_info): del self.exc_value del self.exc_traceback del self.thread + + + at contextlib.contextmanager +def save_restore_warnings_filters(): + old_filters = warnings.filters[:] + try: + yield + finally: + warnings.filters[:] = old_filters diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst b/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst new file mode 100644 index 0000000000000..6f908d99feaf7 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst @@ -0,0 +1,3 @@ +Fix ``test_copyreg`` when ``numpy`` is installed: ``test.pickletester`` now +saves/restores warnings filters when importing ``numpy``, to ignore filters +installed by ``numpy``. From webhook-mailer at python.org Wed Jun 17 13:10:56 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 17:10:56 -0000 Subject: [Python-checkins] bpo-41006: collections imports lazily heap (GH-20940) Message-ID: https://github.com/python/cpython/commit/7824cc05bfe7f8181b21848a52007ddaf5612b9b commit: 7824cc05bfe7f8181b21848a52007ddaf5612b9b branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T19:10:47+02:00 summary: bpo-41006: collections imports lazily heap (GH-20940) The collections module now imports lazily the heapq modules in the Counter.most_common() method to speedup Python startup time. files: M Lib/collections/__init__.py diff --git a/Lib/collections/__init__.py b/Lib/collections/__init__.py index 42d0ec777c3f7..5d75501645fc4 100644 --- a/Lib/collections/__init__.py +++ b/Lib/collections/__init__.py @@ -27,7 +27,6 @@ ] import _collections_abc -import heapq as _heapq import sys as _sys from itertools import chain as _chain @@ -608,7 +607,10 @@ def most_common(self, n=None): # Emulate Bag.sortedByCount from Smalltalk if n is None: return sorted(self.items(), key=_itemgetter(1), reverse=True) - return _heapq.nlargest(n, self.items(), key=_itemgetter(1)) + + # Lazy import to speedup Python startup time + import heapq + return heapq.nlargest(n, self.items(), key=_itemgetter(1)) def elements(self): '''Iterator over elements repeating each as many times as its count. 
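
The collections change above is an instance of a common startup-time trick: move an import from module scope into the one method that actually needs it, so the cost is only paid on first use. Below is a minimal, self-contained sketch of that deferred-import pattern. It is illustrative only: TinyCounter is a made-up class for this note, not the real collections.Counter code.

# Deferred-import sketch (hypothetical TinyCounter, not stdlib code).
from operator import itemgetter

class TinyCounter(dict):
    """Toy counter that only loads heapq when a bounded most_common() runs."""

    def most_common(self, n=None):
        if n is None:
            # The full-sort path never needs heapq at all.
            return sorted(self.items(), key=itemgetter(1), reverse=True)
        # Lazy import: heapq is loaded on the first call that needs it;
        # later calls hit the sys.modules cache.
        import heapq
        return heapq.nlargest(n, self.items(), key=itemgetter(1))

c = TinyCounter(spam=3, eggs=1, ham=2)
print(c.most_common(2))    # [('spam', 3), ('ham', 2)]

The trade-off is a tiny per-call cost for the import statement (a sys.modules lookup after the first time) in exchange for not touching heapq at all in programs that never call most_common(n).
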
From webhook-mailer at python.org Wed Jun 17 13:11:59 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 17:11:59 -0000 Subject: [Python-checkins] bpo-41006: pkgutil imports lazily re (GH-20939) Message-ID: https://github.com/python/cpython/commit/98ce7b107e6611d04dc35a4f5b02ea215ef122cf commit: 98ce7b107e6611d04dc35a4f5b02ea215ef122cf branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T19:11:50+02:00 summary: bpo-41006: pkgutil imports lazily re (GH-20939) The pkgutil module now imports lazily the re module to speedup Python startup time. files: M Lib/pkgutil.py diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py index 4c184678a2912..3d7f19f39981d 100644 --- a/Lib/pkgutil.py +++ b/Lib/pkgutil.py @@ -7,7 +7,6 @@ import importlib.machinery import os import os.path -import re import sys from types import ModuleType import warnings @@ -638,9 +637,7 @@ def get_data(package, resource): return loader.get_data(resource_name) -_DOTTED_WORDS = r'(?!\d)(\w+)(\.(?!\d)(\w+))*' -_NAME_PATTERN = re.compile(f'^(?P{_DOTTED_WORDS})(?P:(?P{_DOTTED_WORDS})?)?$', re.U) -del _DOTTED_WORDS +_NAME_PATTERN = None def resolve_name(name): """ @@ -674,6 +671,15 @@ def resolve_name(name): AttributeError - if a failure occurred when traversing the object hierarchy within the imported package to get to the desired object) """ + global _NAME_PATTERN + if _NAME_PATTERN is None: + # Lazy import to speedup Python startup time + import re + dotted_words = r'(?!\d)(\w+)(\.(?!\d)(\w+))*' + _NAME_PATTERN = re.compile(f'^(?P{dotted_words})' + f'(?P:(?P{dotted_words})?)?$', + re.UNICODE) + m = _NAME_PATTERN.match(name) if not m: raise ValueError(f'invalid format: {name!r}') From webhook-mailer at python.org Wed Jun 17 13:26:13 2020 From: webhook-mailer at python.org (stratakis) Date: Wed, 17 Jun 2020 17:26:13 -0000 Subject: [Python-checkins] bpo-40637: Do not emit warnings for disabled builtin hashes (GH-20937) Message-ID: https://github.com/python/cpython/commit/236a0f5cf022b59dbb6ea17a8e7a677c573d39b9 commit: 236a0f5cf022b59dbb6ea17a8e7a677c573d39b9 branch: master author: stratakis committer: GitHub date: 2020-06-17T10:26:08-07:00 summary: bpo-40637: Do not emit warnings for disabled builtin hashes (GH-20937) test_hashlib emits some warnings when it cannot find some hashes as it assumes they failed to compile. Since we can disable hashes through configure, we emit the warnings only in the case that we did not intentionaly disable them. Automerge-Triggered-By: @tiran files: M Lib/test/test_hashlib.py diff --git a/Lib/test/test_hashlib.py b/Lib/test/test_hashlib.py index 2f79244748e68..ba902986adb81 100644 --- a/Lib/test/test_hashlib.py +++ b/Lib/test/test_hashlib.py @@ -103,7 +103,7 @@ def _conditional_import_module(self, module_name): try: return importlib.import_module(module_name) except ModuleNotFoundError as error: - if self._warn_on_extension_import: + if self._warn_on_extension_import and module_name in builtin_hashes: warnings.warn('Did a C extension fail to compile? 
%s' % error) return None From webhook-mailer at python.org Wed Jun 17 17:16:08 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 21:16:08 -0000 Subject: [Python-checkins] bpo-41006: importlib.util no longer imports typing (GH-20938) Message-ID: https://github.com/python/cpython/commit/9e09849d20987c131b28bcdd252e53440d4cd1b3 commit: 9e09849d20987c131b28bcdd252e53440d4cd1b3 branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T23:15:59+02:00 summary: bpo-41006: importlib.util no longer imports typing (GH-20938) Create importlib._abc submodule to avoid importing typing when importlib.util is imported. Move Loader ABC into importlib._abc. files: A Lib/importlib/_abc.py M Lib/importlib/abc.py M Lib/importlib/util.py M Lib/test/test_importlib/test_spec.py diff --git a/Lib/importlib/_abc.py b/Lib/importlib/_abc.py new file mode 100644 index 0000000000000..fb5ec727cea6e --- /dev/null +++ b/Lib/importlib/_abc.py @@ -0,0 +1,50 @@ +"""Subset of importlib.abc used to reduce importlib.util imports.""" +from . import _bootstrap +import abc + + +class Loader(metaclass=abc.ABCMeta): + + """Abstract base class for import loaders.""" + + def create_module(self, spec): + """Return a module to initialize and into which to load. + + This method should raise ImportError if anything prevents it + from creating a new module. It may return None to indicate + that the spec should create the new module. + """ + # By default, defer to default semantics for the new module. + return None + + # We don't define exec_module() here since that would break + # hasattr checks we do to support backward compatibility. + + def load_module(self, fullname): + """Return the loaded module. + + The module must be added to sys.modules and have import-related + attributes set properly. The fullname is a str. + + ImportError is raised on failure. + + This method is deprecated in favor of loader.exec_module(). If + exec_module() exists then it is used to provide a backwards-compatible + functionality for this method. + + """ + if not hasattr(self, 'exec_module'): + raise ImportError + return _bootstrap._load_module_shim(self, fullname) + + def module_repr(self, module): + """Return a module's repr. + + Used by the module type when the method does not raise + NotImplementedError. + + This method is deprecated. + + """ + # The exception will cause ModuleType.__repr__ to ignore this method. + raise NotImplementedError diff --git a/Lib/importlib/abc.py b/Lib/importlib/abc.py index 0b20e7c13f282..97d5afa300193 100644 --- a/Lib/importlib/abc.py +++ b/Lib/importlib/abc.py @@ -12,6 +12,7 @@ import _frozen_importlib_external except ImportError: _frozen_importlib_external = _bootstrap_external +from ._abc import Loader import abc import warnings from typing import Protocol, runtime_checkable @@ -134,53 +135,6 @@ def invalidate_caches(self): _register(PathEntryFinder, machinery.FileFinder) -class Loader(metaclass=abc.ABCMeta): - - """Abstract base class for import loaders.""" - - def create_module(self, spec): - """Return a module to initialize and into which to load. - - This method should raise ImportError if anything prevents it - from creating a new module. It may return None to indicate - that the spec should create the new module. - """ - # By default, defer to default semantics for the new module. - return None - - # We don't define exec_module() here since that would break - # hasattr checks we do to support backward compatibility. 
- - def load_module(self, fullname): - """Return the loaded module. - - The module must be added to sys.modules and have import-related - attributes set properly. The fullname is a str. - - ImportError is raised on failure. - - This method is deprecated in favor of loader.exec_module(). If - exec_module() exists then it is used to provide a backwards-compatible - functionality for this method. - - """ - if not hasattr(self, 'exec_module'): - raise ImportError - return _bootstrap._load_module_shim(self, fullname) - - def module_repr(self, module): - """Return a module's repr. - - Used by the module type when the method does not raise - NotImplementedError. - - This method is deprecated. - - """ - # The exception will cause ModuleType.__repr__ to ignore this method. - raise NotImplementedError - - class ResourceLoader(Loader): """Abstract base class for loaders which can return data from their diff --git a/Lib/importlib/util.py b/Lib/importlib/util.py index 269a6fa930aab..1e44843a687f2 100644 --- a/Lib/importlib/util.py +++ b/Lib/importlib/util.py @@ -1,5 +1,5 @@ """Utility code for constructing importers, etc.""" -from . import abc +from ._abc import Loader from ._bootstrap import module_from_spec from ._bootstrap import _resolve_name from ._bootstrap import spec_from_loader @@ -263,7 +263,7 @@ def __delattr__(self, attr): delattr(self, attr) -class LazyLoader(abc.Loader): +class LazyLoader(Loader): """A loader that creates a module which defers loading until attribute access.""" diff --git a/Lib/test/test_importlib/test_spec.py b/Lib/test/test_importlib/test_spec.py index 5a16a03de60fa..20dacec8664e1 100644 --- a/Lib/test/test_importlib/test_spec.py +++ b/Lib/test/test_importlib/test_spec.py @@ -650,8 +650,8 @@ def test_spec_from_file_location_default(self): # Need to use a circuitous route to get at importlib.machinery to make # sure the same class object is used in the isinstance() check as # would have been used to create the loader. - self.assertIsInstance(spec.loader, - self.util.abc.machinery.SourceFileLoader) + SourceFileLoader = self.util.spec_from_file_location.__globals__['SourceFileLoader'] + self.assertIsInstance(spec.loader, SourceFileLoader) self.assertEqual(spec.loader.name, self.name) self.assertEqual(spec.loader.path, self.path) self.assertEqual(spec.origin, self.path) From webhook-mailer at python.org Wed Jun 17 17:59:06 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 21:59:06 -0000 Subject: [Python-checkins] bpo-41006: Document the runpy optimization (GH-20953) Message-ID: https://github.com/python/cpython/commit/4c18fc8f1def7030e5ec5d1ffb9355d7453dc408 commit: 4c18fc8f1def7030e5ec5d1ffb9355d7453dc408 branch: master author: Victor Stinner committer: GitHub date: 2020-06-17T23:58:58+02:00 summary: bpo-41006: Document the runpy optimization (GH-20953) files: M Doc/whatsnew/3.10.rst diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 9878f7f81ceda..1f4840fc0fb8e 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -103,6 +103,11 @@ Improved Modules Optimizations ============= +* The :mod:`runpy` module now imports less modules. + The ``python3 -m module-name`` command startup time is 1.3x faster in + average. + (Contributed by Victor Stinner in :issue:`41006`.) 
+ Deprecated ========== From webhook-mailer at python.org Wed Jun 17 19:11:55 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 23:11:55 -0000 Subject: [Python-checkins] bpo-41006: Remove init_sys_streams() hack (GH-20954) Message-ID: https://github.com/python/cpython/commit/1bf7959dce0597e312c6f35476a7cc957fd0323c commit: 1bf7959dce0597e312c6f35476a7cc957fd0323c branch: master author: Victor Stinner committer: GitHub date: 2020-06-18T01:11:46+02:00 summary: bpo-41006: Remove init_sys_streams() hack (GH-20954) The encodings.latin_1 module is no longer imported at startup. Now it is only imported when it is the filesystem encoding or the stdio encoding. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-18-00-07-09.bpo-41006.H-wN-d.rst M Python/pylifecycle.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-18-00-07-09.bpo-41006.H-wN-d.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-18-00-07-09.bpo-41006.H-wN-d.rst new file mode 100644 index 0000000000000..4593e6bb89a9e --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-18-00-07-09.bpo-41006.H-wN-d.rst @@ -0,0 +1,2 @@ +The ``encodings.latin_1`` module is no longer imported at startup. Now it is +only imported when it is the filesystem encoding or the stdio encoding. diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index c754f2169dece..87f25e623f570 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1939,7 +1939,6 @@ static PyStatus init_sys_streams(PyThreadState *tstate) { PyObject *iomod = NULL; - PyObject *m; PyObject *std = NULL; int fd; PyObject * encoding_attr; @@ -1959,18 +1958,6 @@ init_sys_streams(PyThreadState *tstate) } #endif - /* Hack to avoid a nasty recursion issue when Python is invoked - in verbose mode: pre-import the Latin-1 and UTF-8 codecs */ - if ((m = PyImport_ImportModule("encodings.utf_8")) == NULL) { - goto error; - } - Py_DECREF(m); - - if (!(m = PyImport_ImportModule("encodings.latin_1"))) { - goto error; - } - Py_DECREF(m); - if (!(iomod = PyImport_ImportModule("io"))) { goto error; } From webhook-mailer at python.org Wed Jun 17 19:20:58 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 17 Jun 2020 23:20:58 -0000 Subject: [Python-checkins] bpo-41006: What's New: less => fewer modules (GH-20955) Message-ID: https://github.com/python/cpython/commit/2c2a4f3d8545784c6e4ca8128bfc706916080712 commit: 2c2a4f3d8545784c6e4ca8128bfc706916080712 branch: master author: Victor Stinner committer: GitHub date: 2020-06-18T01:20:51+02:00 summary: bpo-41006: What's New: less => fewer modules (GH-20955) Typo spotted by Eric V. Smith ;-) files: M Doc/whatsnew/3.10.rst diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 1f4840fc0fb8e..f956ddd45dca9 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -103,7 +103,7 @@ Improved Modules Optimizations ============= -* The :mod:`runpy` module now imports less modules. +* The :mod:`runpy` module now imports fewer modules. The ``python3 -m module-name`` command startup time is 1.3x faster in average. (Contributed by Victor Stinner in :issue:`41006`.) 
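
A rough way to sanity-check startup-time changes such as the bpo-41006 series above is to count how many entries a fresh interpreter ends up with in sys.modules. The snippet below is only a sketch: the helper name modules_at_startup is made up for this note, and the absolute numbers depend on the Python build and platform.

# Count modules imported by a fresh interpreter (illustrative sketch).
import subprocess
import sys

def modules_at_startup(*extra_args):
    """Start a child interpreter and report len(sys.modules) at startup."""
    cmd = [sys.executable, *extra_args, "-c",
           "import sys; print(len(sys.modules))"]
    out = subprocess.run(cmd, capture_output=True, text=True, check=True)
    return int(out.stdout.strip())

print("bare startup:      ", modules_at_startup())
print("without site (-S): ", modules_at_startup("-S"))

For a per-module breakdown of the same cost, running "python3 -X importtime -c pass" prints the import time of every module loaded during startup.
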
From webhook-mailer at python.org Wed Jun 17 23:51:45 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 18 Jun 2020 03:51:45 -0000 Subject: [Python-checkins] Synchronize macOS installer with 3.9 Message-ID: https://github.com/python/cpython/commit/62855d53d223980f6ea8fbcb94fa25cbd86f3cab commit: 62855d53d223980f6ea8fbcb94fa25cbd86f3cab branch: 3.7 author: Ned Deily committer: Ned Deily date: 2020-06-16T21:57:09-04:00 summary: Synchronize macOS installer with 3.9 files: M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 8bde04ee04630..d0b356614b578 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -1066,13 +1066,40 @@ def buildPythonDocs(): curDir = os.getcwd() os.chdir(buildDir) runCommand('make clean') - # Create virtual environment for docs builds with blurb and sphinx - runCommand('make venv') - runCommand('make html PYTHON=venv/bin/python') + + # Search third-party source directory for a pre-built version of the docs. + # Use the naming convention of the docs.python.org html downloads: + # python-3.9.0b1-docs-html.tar.bz2 + doctarfiles = [ f for f in os.listdir(DEPSRC) + if f.startswith('python-'+getFullVersion()) + if f.endswith('-docs-html.tar.bz2') ] + if doctarfiles: + doctarfile = doctarfiles[0] + if not os.path.exists('build'): + os.mkdir('build') + # if build directory existed, it was emptied by make clean, above + os.chdir('build') + # Extract the first archive found for this version into build + runCommand('tar xjf %s'%shellQuote(os.path.join(DEPSRC, doctarfile))) + # see if tar extracted a directory ending in -docs-html + archivefiles = [ f for f in os.listdir('.') + if f.endswith('-docs-html') + if os.path.isdir(f) ] + if archivefiles: + archivefile = archivefiles[0] + # make it our 'Docs/build/html' directory + print(' -- using pre-built python documentation from %s'%archivefile) + os.rename(archivefile, 'html') + os.chdir(buildDir) + + htmlDir = os.path.join('build', 'html') + if not os.path.exists(htmlDir): + # Create virtual environment for docs builds with blurb and sphinx + runCommand('make venv') + runCommand('venv/bin/python3 -m pip install -U Sphinx==2.3.1') + runCommand('make html PYTHON=venv/bin/python') + os.rename(htmlDir, docdir) os.chdir(curDir) - if not os.path.exists(docdir): - os.mkdir(docdir) - os.rename(os.path.join(buildDir, 'build', 'html'), docdir) def buildPython(): @@ -1217,7 +1244,8 @@ def buildPython(): if ln.startswith('VERSION='): VERSION=ln.split()[1] if ln.startswith('ABIFLAGS='): - ABIFLAGS=ln.split()[1] + ABIFLAGS=ln.split() + ABIFLAGS=ABIFLAGS[1] if len(ABIFLAGS) > 1 else '' if ln.startswith('LDVERSION='): LDVERSION=ln.split()[1] fp.close() @@ -1268,7 +1296,8 @@ def buildPython(): import pprint if getVersionMajorMinor() >= (3, 6): # XXX this is extra-fragile - path = os.path.join(path_to_lib, '_sysconfigdata_m_darwin_darwin.py') + path = os.path.join(path_to_lib, + '_sysconfigdata_%s_darwin_darwin.py' % (ABIFLAGS,)) else: path = os.path.join(path_to_lib, '_sysconfigdata.py') fp = open(path, 'r') @@ -1306,12 +1335,6 @@ def buildPython(): os.chdir(curdir) - if PYTHON_3: - # Remove the 'Current' link, that way we don't accidentally mess - # with an already installed version of python 2 - os.unlink(os.path.join(rootDir, 'Library', 'Frameworks', - 'Python.framework', 'Versions', 'Current')) - def patchFile(inPath, outPath): data = fileContents(inPath) data = data.replace('$FULL_VERSION', getFullVersion()) From 
webhook-mailer at python.org Wed Jun 17 23:56:08 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 18 Jun 2020 03:56:08 -0000 Subject: [Python-checkins] 3.6.11rc1 Message-ID: https://github.com/python/cpython/commit/d384df407ebdbb1ab386597658f1ac78e8803afe commit: d384df407ebdbb1ab386597658f1ac78e8803afe branch: 3.6 author: Ned Deily committer: Ned Deily date: 2020-06-17T06:59:51-04:00 summary: 3.6.11rc1 files: A Misc/NEWS.d/3.6.11rc1.rst D Misc/NEWS.d/next/Core and Builtins/2020-01-22-15-53-37.bpo-39421.O3nG7u.rst D Misc/NEWS.d/next/Core and Builtins/2020-02-04-10-27-41.bpo-39510.PMIh-f.rst D Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst D Misc/NEWS.d/next/Security/2020-01-28-20-54-09.bpo-39401.he7h_A.rst D Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst D Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst D Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst M Doc/Makefile M Include/patchlevel.h M Lib/pydoc_data/topics.py M README.rst diff --git a/Doc/Makefile b/Doc/Makefile index 307d1e0e7de10..efd31a9c79f2c 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -123,7 +123,7 @@ clean: venv: $(PYTHON) -m venv $(VENVDIR) - $(VENVDIR)/bin/python3 -m pip install -U Sphinx blurb + $(VENVDIR)/bin/python3 -m pip install -U Sphinx==2.3.1 blurb @echo "The venv has been created in the $(VENVDIR) directory" dist: diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 8e73e104cd12d..b0435268a246d 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -18,12 +18,12 @@ /*--start constants--*/ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 6 -#define PY_MICRO_VERSION 10 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL -#define PY_RELEASE_SERIAL 0 +#define PY_MICRO_VERSION 11 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA +#define PY_RELEASE_SERIAL 1 /* Version as a string */ -#define PY_VERSION "3.6.10+" +#define PY_VERSION "3.6.11rc1" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index b7d7cfa412bc3..69bf815cab361 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Wed Dec 11 03:20:37 2019 +# Autogenerated by Sphinx on Wed Jun 17 06:55:37 2020 topics = {'assert': 'The "assert" statement\n' '**********************\n' '\n' @@ -99,27 +99,26 @@ 'assigned,\n' ' from left to right, to the corresponding targets.\n' '\n' - ' * If the target list contains one target prefixed with an\n' - ' asterisk, called a ?starred? target: The object must be ' - 'an\n' - ' iterable with at least as many items as there are targets ' - 'in the\n' - ' target list, minus one. The first items of the iterable ' - 'are\n' - ' assigned, from left to right, to the targets before the ' + ' * If the target list contains one target prefixed with an ' + 'asterisk,\n' + ' called a ?starred? target: The object must be an iterable ' + 'with at\n' + ' least as many items as there are targets in the target ' + 'list, minus\n' + ' one. The first items of the iterable are assigned, from ' + 'left to\n' + ' right, to the targets before the starred target. The ' + 'final items\n' + ' of the iterable are assigned to the targets after the ' 'starred\n' - ' target. The final items of the iterable are assigned to ' - 'the\n' - ' targets after the starred target. 
A list of the remaining ' - 'items\n' - ' in the iterable is then assigned to the starred target ' - '(the list\n' - ' can be empty).\n' + ' target. A list of the remaining items in the iterable is ' + 'then\n' + ' assigned to the starred target (the list can be empty).\n' '\n' ' * Else: The object must be an iterable with the same number ' - 'of\n' - ' items as there are targets in the target list, and the ' - 'items are\n' + 'of items\n' + ' as there are targets in the target list, and the items ' + 'are\n' ' assigned, from left to right, to the corresponding ' 'targets.\n' '\n' @@ -135,10 +134,10 @@ 'in the\n' ' current local namespace.\n' '\n' - ' * Otherwise: the name is bound to the object in the global\n' - ' namespace or the outer namespace determined by ' - '"nonlocal",\n' - ' respectively.\n' + ' * Otherwise: the name is bound to the object in the global ' + 'namespace\n' + ' or the outer namespace determined by "nonlocal", ' + 'respectively.\n' '\n' ' The name is rebound if it was already bound. This may cause ' 'the\n' @@ -224,26 +223,27 @@ 'called with\n' ' appropriate arguments.\n' '\n' - '* If the target is a slicing: The primary expression in the\n' - ' reference is evaluated. It should yield a mutable sequence ' - 'object\n' - ' (such as a list). The assigned object should be a sequence ' - 'object\n' - ' of the same type. Next, the lower and upper bound ' - 'expressions are\n' - ' evaluated, insofar they are present; defaults are zero and ' - 'the\n' - ' sequence?s length. The bounds should evaluate to integers. ' - 'If\n' - ' either bound is negative, the sequence?s length is added to ' - 'it. The\n' - ' resulting bounds are clipped to lie between zero and the ' + '* If the target is a slicing: The primary expression in the ' + 'reference\n' + ' is evaluated. It should yield a mutable sequence object ' + '(such as a\n' + ' list). The assigned object should be a sequence object of ' + 'the same\n' + ' type. Next, the lower and upper bound expressions are ' + 'evaluated,\n' + ' insofar they are present; defaults are zero and the ' 'sequence?s\n' - ' length, inclusive. Finally, the sequence object is asked to ' - 'replace\n' - ' the slice with the items of the assigned sequence. The ' - 'length of\n' - ' the slice may be different from the length of the assigned ' + ' length. The bounds should evaluate to integers. If either ' + 'bound is\n' + ' negative, the sequence?s length is added to it. The ' + 'resulting\n' + ' bounds are clipped to lie between zero and the sequence?s ' + 'length,\n' + ' inclusive. Finally, the sequence object is asked to replace ' + 'the\n' + ' slice with the items of the assigned sequence. The length ' + 'of the\n' + ' slice may be different from the length of the assigned ' 'sequence,\n' ' thus changing the length of the target sequence, if the ' 'target\n' @@ -542,11 +542,13 @@ 'needs, for\n' ' example, "object.__getattribute__(self, name)".\n' '\n' - ' Note: This method may still be bypassed when looking ' - 'up special\n' - ' methods as the result of implicit invocation via ' - 'language syntax\n' - ' or built-in functions. See Special method lookup.\n' + ' Note:\n' + '\n' + ' This method may still be bypassed when looking up ' + 'special methods\n' + ' as the result of implicit invocation via language ' + 'syntax or\n' + ' built-in functions. 
See Special method lookup.\n' '\n' 'object.__setattr__(self, name, value)\n' '\n' @@ -604,8 +606,10 @@ '\n' ' sys.modules[__name__].__class__ = VerboseModule\n' '\n' - 'Note: Setting module "__class__" only affects lookups ' - 'made using the\n' + 'Note:\n' + '\n' + ' Setting module "__class__" only affects lookups made ' + 'using the\n' ' attribute access syntax ? directly accessing the ' 'module globals\n' ' (whether by code within the module, or via a reference ' @@ -813,10 +817,9 @@ '--------------------------\n' '\n' '* When inheriting from a class without *__slots__*, the ' - '*__dict__*\n' - ' and *__weakref__* attribute of the instances will ' - 'always be\n' - ' accessible.\n' + '*__dict__* and\n' + ' *__weakref__* attribute of the instances will always ' + 'be accessible.\n' '\n' '* Without a *__dict__* variable, instances cannot be ' 'assigned new\n' @@ -831,14 +834,12 @@ ' declaration.\n' '\n' '* Without a *__weakref__* variable for each instance, ' - 'classes\n' - ' defining *__slots__* do not support weak references to ' - 'its\n' - ' instances. If weak reference support is needed, then ' - 'add\n' - ' "\'__weakref__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' + 'classes defining\n' + ' *__slots__* do not support weak references to its ' + 'instances. If weak\n' + ' reference support is needed, then add ' + '"\'__weakref__\'" to the\n' + ' sequence of strings in the *__slots__* declaration.\n' '\n' '* *__slots__* are implemented at the class level by ' 'creating\n' @@ -851,24 +852,23 @@ ' attribute would overwrite the descriptor assignment.\n' '\n' '* The action of a *__slots__* declaration is not limited ' - 'to the\n' - ' class where it is defined. *__slots__* declared in ' - 'parents are\n' - ' available in child classes. However, child subclasses ' - 'will get a\n' - ' *__dict__* and *__weakref__* unless they also define ' - '*__slots__*\n' - ' (which should only contain names of any *additional* ' - 'slots).\n' + 'to the class\n' + ' where it is defined. *__slots__* declared in parents ' + 'are available\n' + ' in child classes. However, child subclasses will get a ' + '*__dict__*\n' + ' and *__weakref__* unless they also define *__slots__* ' + '(which should\n' + ' only contain names of any *additional* slots).\n' '\n' '* If a class defines a slot also defined in a base ' - 'class, the\n' - ' instance variable defined by the base class slot is ' - 'inaccessible\n' - ' (except by retrieving its descriptor directly from the ' - 'base class).\n' - ' This renders the meaning of the program undefined. In ' - 'the future, a\n' + 'class, the instance\n' + ' variable defined by the base class slot is ' + 'inaccessible (except by\n' + ' retrieving its descriptor directly from the base ' + 'class). This\n' + ' renders the meaning of the program undefined. In the ' + 'future, a\n' ' check may be added to prevent this.\n' '\n' '* Nonempty *__slots__* does not work for classes derived ' @@ -877,9 +877,9 @@ '"bytes" and "tuple".\n' '\n' '* Any non-string iterable may be assigned to ' - '*__slots__*. Mappings\n' - ' may also be used; however, in the future, special ' - 'meaning may be\n' + '*__slots__*. 
Mappings may\n' + ' also be used; however, in the future, special meaning ' + 'may be\n' ' assigned to the values corresponding to each key.\n' '\n' '* *__class__* assignment works only if both classes have ' @@ -1657,15 +1657,15 @@ '\n' ' Strings and binary sequences cannot be directly compared.\n' '\n' - '* Sequences (instances of "tuple", "list", or "range") can ' - 'be\n' - ' compared only within each of their types, with the ' - 'restriction that\n' - ' ranges do not support order comparison. Equality ' - 'comparison across\n' - ' these types results in inequality, and ordering comparison ' - 'across\n' - ' these types raises "TypeError".\n' + '* Sequences (instances of "tuple", "list", or "range") can be ' + 'compared\n' + ' only within each of their types, with the restriction that ' + 'ranges do\n' + ' not support order comparison. Equality comparison across ' + 'these\n' + ' types results in inequality, and ordering comparison across ' + 'these\n' + ' types raises "TypeError".\n' '\n' ' Sequences compare lexicographically using comparison of\n' ' corresponding elements, whereby reflexivity of the elements ' @@ -1714,8 +1714,8 @@ ' false because the type is not the same).\n' '\n' ' * Collections that support order comparison are ordered the ' - 'same\n' - ' as their first unequal elements (for example, "[1,2,x] <= ' + 'same as\n' + ' their first unequal elements (for example, "[1,2,x] <= ' '[1,2,y]"\n' ' has the same value as "x <= y"). If a corresponding ' 'element does\n' @@ -1733,8 +1733,8 @@ '"TypeError".\n' '\n' '* Sets (instances of "set" or "frozenset") can be compared ' - 'within\n' - ' and across their types.\n' + 'within and\n' + ' across their types.\n' '\n' ' They define order comparison operators to mean subset and ' 'superset\n' @@ -1753,8 +1753,8 @@ ' Comparison of sets enforces reflexivity of its elements.\n' '\n' '* Most other built-in types have no comparison methods ' - 'implemented,\n' - ' so they inherit the default comparison behavior.\n' + 'implemented, so\n' + ' they inherit the default comparison behavior.\n' '\n' 'User-defined classes that customize their comparison behavior ' 'should\n' @@ -1803,10 +1803,10 @@ ' "total_ordering()" decorator.\n' '\n' '* The "hash()" result should be consistent with equality. ' - 'Objects\n' - ' that are equal should either have the same hash value, or ' - 'be marked\n' - ' as unhashable.\n' + 'Objects that\n' + ' are equal should either have the same hash value, or be ' + 'marked as\n' + ' unhashable.\n' '\n' 'Python does not enforce these consistency rules. In fact, ' 'the\n' @@ -2079,10 +2079,11 @@ ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, ' '2]".\n' '\n' - 'Note: There is a subtlety when the sequence is being modified by ' - 'the\n' - ' loop (this can only occur for mutable sequences, e.g. lists). ' - 'An\n' + 'Note:\n' + '\n' + ' There is a subtlety when the sequence is being modified by the ' + 'loop\n' + ' (this can only occur for mutable sequences, e.g. lists). An\n' ' internal counter is used to keep track of which item is used ' 'next,\n' ' and this is incremented on each iteration. When this counter ' @@ -2303,20 +2304,22 @@ 'follows:\n' '\n' '1. The context expression (the expression given in the ' - '"with_item")\n' - ' is evaluated to obtain a context manager.\n' + '"with_item") is\n' + ' evaluated to obtain a context manager.\n' '\n' '2. The context manager?s "__exit__()" is loaded for later use.\n' '\n' '3. The context manager?s "__enter__()" method is invoked.\n' '\n' - '4. 
If a target was included in the "with" statement, the return\n' - ' value from "__enter__()" is assigned to it.\n' + '4. If a target was included in the "with" statement, the return ' + 'value\n' + ' from "__enter__()" is assigned to it.\n' '\n' - ' Note: The "with" statement guarantees that if the ' - '"__enter__()"\n' - ' method returns without an error, then "__exit__()" will ' - 'always be\n' + ' Note:\n' + '\n' + ' The "with" statement guarantees that if the "__enter__()" ' + 'method\n' + ' returns without an error, then "__exit__()" will always be\n' ' called. Thus, if an error occurs during the assignment to ' 'the\n' ' target list, it will be treated the same as an error ' @@ -2768,14 +2771,17 @@ '\n' '-[ Footnotes ]-\n' '\n' - '[1] The exception is propagated to the invocation stack unless\n' - ' there is a "finally" clause which happens to raise another\n' - ' exception. That new exception causes the old one to be ' - 'lost.\n' + '[1] The exception is propagated to the invocation stack unless ' + 'there\n' + ' is a "finally" clause which happens to raise another ' + 'exception.\n' + ' That new exception causes the old one to be lost.\n' '\n' - '[2] A string literal appearing as the first statement in the\n' - ' function body is transformed into the function?s "__doc__"\n' - ' attribute and therefore the function?s *docstring*.\n' + '[2] A string literal appearing as the first statement in the ' + 'function\n' + ' body is transformed into the function?s "__doc__" attribute ' + 'and\n' + ' therefore the function?s *docstring*.\n' '\n' '[3] A string literal appearing as the first statement in the ' 'class\n' @@ -2875,8 +2881,8 @@ ' complex;\n' '\n' '* otherwise, if either argument is a floating point number, ' - 'the\n' - ' other is converted to floating point;\n' + 'the other\n' + ' is converted to floating point;\n' '\n' '* otherwise, both must be integers and no conversion is ' 'necessary.\n' @@ -2984,7 +2990,9 @@ 'for\n' ' objects that still exist when the interpreter exits.\n' '\n' - ' Note: "del x" doesn?t directly call "x.__del__()" ? the ' + ' Note:\n' + '\n' + ' "del x" doesn?t directly call "x.__del__()" ? the ' 'former\n' ' decrements the reference count for "x" by one, and the ' 'latter is\n' @@ -3008,13 +3016,15 @@ '\n' ' See also: Documentation for the "gc" module.\n' '\n' - ' Warning: Due to the precarious circumstances under ' - 'which\n' - ' "__del__()" methods are invoked, exceptions that occur ' - 'during\n' - ' their execution are ignored, and a warning is printed ' - 'to\n' - ' "sys.stderr" instead. In particular:\n' + ' Warning:\n' + '\n' + ' Due to the precarious circumstances under which ' + '"__del__()"\n' + ' methods are invoked, exceptions that occur during ' + 'their execution\n' + ' are ignored, and a warning is printed to "sys.stderr" ' + 'instead.\n' + ' In particular:\n' '\n' ' * "__del__()" can be invoked when arbitrary code is ' 'being\n' @@ -3027,22 +3037,20 @@ ' that gets interrupted to execute "__del__()".\n' '\n' ' * "__del__()" can be executed during interpreter ' - 'shutdown. As\n' - ' a consequence, the global variables it needs to ' - 'access\n' - ' (including other modules) may already have been ' - 'deleted or set\n' - ' to "None". 
Python guarantees that globals whose name ' - 'begins\n' - ' with a single underscore are deleted from their ' - 'module before\n' - ' other globals are deleted; if no other references to ' - 'such\n' - ' globals exist, this may help in assuring that ' - 'imported modules\n' - ' are still available at the time when the "__del__()" ' - 'method is\n' - ' called.\n' + 'shutdown. As a\n' + ' consequence, the global variables it needs to access ' + '(including\n' + ' other modules) may already have been deleted or set ' + 'to "None".\n' + ' Python guarantees that globals whose name begins ' + 'with a single\n' + ' underscore are deleted from their module before ' + 'other globals\n' + ' are deleted; if no other references to such globals ' + 'exist, this\n' + ' may help in assuring that imported modules are still ' + 'available\n' + ' at the time when the "__del__()" method is called.\n' '\n' 'object.__repr__(self)\n' '\n' @@ -3213,19 +3221,21 @@ ' def __hash__(self):\n' ' return hash((self.name, self.nick, self.color))\n' '\n' - ' Note: "hash()" truncates the value returned from an ' - 'object?s\n' - ' custom "__hash__()" method to the size of a ' - '"Py_ssize_t". This\n' - ' is typically 8 bytes on 64-bit builds and 4 bytes on ' - '32-bit\n' - ' builds. If an object?s "__hash__()" must ' - 'interoperate on builds\n' - ' of different bit sizes, be sure to check the width on ' - 'all\n' - ' supported builds. An easy way to do this is with ' - '"python -c\n' - ' "import sys; print(sys.hash_info.width)"".\n' + ' Note:\n' + '\n' + ' "hash()" truncates the value returned from an object?s ' + 'custom\n' + ' "__hash__()" method to the size of a "Py_ssize_t". ' + 'This is\n' + ' typically 8 bytes on 64-bit builds and 4 bytes on ' + '32-bit builds.\n' + ' If an object?s "__hash__()" must interoperate on ' + 'builds of\n' + ' different bit sizes, be sure to check the width on all ' + 'supported\n' + ' builds. An easy way to do this is with "python -c ' + '"import sys;\n' + ' print(sys.hash_info.width)"".\n' '\n' ' If a class does not define an "__eq__()" method it ' 'should not\n' @@ -3283,10 +3293,12 @@ ' hashable by an "isinstance(obj, collections.Hashable)" ' 'call.\n' '\n' - ' Note: By default, the "__hash__()" values of str, bytes ' - 'and\n' - ' datetime objects are ?salted? with an unpredictable ' - 'random value.\n' + ' Note:\n' + '\n' + ' By default, the "__hash__()" values of str, bytes and ' + 'datetime\n' + ' objects are ?salted? with an unpredictable random ' + 'value.\n' ' Although they remain constant within an individual ' 'Python\n' ' process, they are not predictable between repeated ' @@ -3864,9 +3876,11 @@ 'its\n' ' value.\n' '\n' - ' Note: "print()" can also be used, but is not a debugger ' - 'command ?\n' - ' this executes the Python "print()" function.\n' + ' Note:\n' + '\n' + ' "print()" can also be used, but is not a debugger command ? ' + 'this\n' + ' executes the Python "print()" function.\n' '\n' 'pp expression\n' '\n' @@ -3986,8 +4000,8 @@ '-[ Footnotes ]-\n' '\n' '[1] Whether a frame is considered to originate in a certain ' - 'module\n' - ' is determined by the "__name__" in the frame globals.\n', + 'module is\n' + ' determined by the "__name__" in the frame globals.\n', 'del': 'The "del" statement\n' '*******************\n' '\n' @@ -4160,13 +4174,15 @@ 'about the\n' 'exceptional condition.\n' '\n' - 'Note: Exception messages are not part of the Python API. 
' - 'Their\n' - ' contents may change from one version of Python to the next ' - 'without\n' - ' warning and should not be relied on by code which will run ' - 'under\n' - ' multiple versions of the interpreter.\n' + 'Note:\n' + '\n' + ' Exception messages are not part of the Python API. Their ' + 'contents\n' + ' may change from one version of Python to the next without ' + 'warning\n' + ' and should not be relied on by code which will run under ' + 'multiple\n' + ' versions of the interpreter.\n' '\n' 'See also the description of the "try" statement in section The ' 'try\n' @@ -4176,10 +4192,9 @@ '-[ Footnotes ]-\n' '\n' '[1] This limitation occurs because the code that is executed ' - 'by\n' - ' these operations is not available at the time the module ' - 'is\n' - ' compiled.\n', + 'by these\n' + ' operations is not available at the time the module is ' + 'compiled.\n', 'execmodel': 'Execution model\n' '***************\n' '\n' @@ -4481,13 +4496,15 @@ 'about the\n' 'exceptional condition.\n' '\n' - 'Note: Exception messages are not part of the Python API. ' - 'Their\n' - ' contents may change from one version of Python to the next ' - 'without\n' - ' warning and should not be relied on by code which will run ' - 'under\n' - ' multiple versions of the interpreter.\n' + 'Note:\n' + '\n' + ' Exception messages are not part of the Python API. Their ' + 'contents\n' + ' may change from one version of Python to the next without ' + 'warning\n' + ' and should not be relied on by code which will run under ' + 'multiple\n' + ' versions of the interpreter.\n' '\n' 'See also the description of the "try" statement in section The ' 'try\n' @@ -4496,11 +4513,10 @@ '\n' '-[ Footnotes ]-\n' '\n' - '[1] This limitation occurs because the code that is executed ' - 'by\n' - ' these operations is not available at the time the module ' - 'is\n' - ' compiled.\n', + '[1] This limitation occurs because the code that is executed by ' + 'these\n' + ' operations is not available at the time the module is ' + 'compiled.\n', 'exprlists': 'Expression lists\n' '****************\n' '\n' @@ -4620,8 +4636,11 @@ 'i\n' ':= a to b do"; e.g., "list(range(3))" returns the list "[0, 1, 2]".\n' '\n' - 'Note: There is a subtlety when the sequence is being modified by the\n' - ' loop (this can only occur for mutable sequences, e.g. lists). An\n' + 'Note:\n' + '\n' + ' There is a subtlety when the sequence is being modified by the ' + 'loop\n' + ' (this can only occur for mutable sequences, e.g. lists). An\n' ' internal counter is used to keep track of which item is used next,\n' ' and this is incremented on each iteration. When this counter has\n' ' reached the length of the sequence the loop terminates. This ' @@ -5593,7 +5612,9 @@ 'defined.\n' ' See section The import statement.\n' '\n' - ' Note: The name "_" is often used in conjunction with\n' + ' Note:\n' + '\n' + ' The name "_" is often used in conjunction with\n' ' internationalization; refer to the documentation for the\n' ' "gettext" module for more information on this ' 'convention.\n' @@ -5740,7 +5761,9 @@ 'defined.\n' ' See section The import statement.\n' '\n' - ' Note: The name "_" is often used in conjunction with\n' + ' Note:\n' + '\n' + ' The name "_" is often used in conjunction with\n' ' internationalization; refer to the documentation for ' 'the\n' ' "gettext" module for more information on this ' @@ -5825,8 +5848,9 @@ '\n' '1. find a module, loading and initializing it if necessary\n' '\n' - '2. 
define a name or names in the local namespace for the scope\n' - ' where the "import" statement occurs.\n' + '2. define a name or names in the local namespace for the scope ' + 'where\n' + ' the "import" statement occurs.\n' '\n' 'When the statement contains multiple clauses (separated by commas) ' 'the\n' @@ -5852,8 +5876,9 @@ 'made\n' 'available in the local namespace in one of three ways:\n' '\n' - '* If the module name is followed by "as", then the name following\n' - ' "as" is bound directly to the imported module.\n' + '* If the module name is followed by "as", then the name following ' + '"as"\n' + ' is bound directly to the imported module.\n' '\n' '* If no other name is specified, and the module being imported is ' 'a\n' @@ -6520,15 +6545,17 @@ '"__rpow__()" (the\n' ' coercion rules would become too complicated).\n' '\n' - ' Note: If the right operand?s type is a subclass of the ' - 'left\n' - ' operand?s type and that subclass provides the ' - 'reflected method\n' - ' for the operation, this method will be called before ' - 'the left\n' - ' operand?s non-reflected method. This behavior allows ' - 'subclasses\n' - ' to override their ancestors? operations.\n' + ' Note:\n' + '\n' + ' If the right operand?s type is a subclass of the left ' + 'operand?s\n' + ' type and that subclass provides the reflected method ' + 'for the\n' + ' operation, this method will be called before the left ' + 'operand?s\n' + ' non-reflected method. This behavior allows subclasses ' + 'to\n' + ' override their ancestors? operations.\n' '\n' 'object.__iadd__(self, other)\n' 'object.__isub__(self, other)\n' @@ -6602,8 +6629,9 @@ 'numeric\n' ' object is an integer type. Must return an integer.\n' '\n' - ' Note: In order to have a coherent integer type class, ' - 'when\n' + ' Note:\n' + '\n' + ' In order to have a coherent integer type class, when\n' ' "__index__()" is defined "__int__()" should also be ' 'defined, and\n' ' both should return the same value.\n' @@ -6845,8 +6873,8 @@ '-[ Footnotes ]-\n' '\n' '[1] While "abs(x%y) < abs(y)" is true mathematically, ' - 'for floats\n' - ' it may not be true numerically due to roundoff. For ' + 'for floats it\n' + ' may not be true numerically due to roundoff. For ' 'example, and\n' ' assuming a platform on which a Python float is an ' 'IEEE 754 double-\n' @@ -6911,22 +6939,22 @@ '"unicodedata.normalize()".\n' '\n' '[4] Due to automatic garbage-collection, free lists, and ' - 'the\n' - ' dynamic nature of descriptors, you may notice ' - 'seemingly unusual\n' - ' behaviour in certain uses of the "is" operator, like ' - 'those\n' - ' involving comparisons between instance methods, or ' - 'constants.\n' - ' Check their documentation for more info.\n' + 'the dynamic\n' + ' nature of descriptors, you may notice seemingly ' + 'unusual behaviour\n' + ' in certain uses of the "is" operator, like those ' + 'involving\n' + ' comparisons between instance methods, or constants. 
' + 'Check their\n' + ' documentation for more info.\n' '\n' '[5] The "%" operator is also used for string formatting; ' 'the same\n' ' precedence applies.\n' '\n' '[6] The power operator "**" binds less tightly than an ' - 'arithmetic\n' - ' or bitwise unary operator on its right, that is, ' + 'arithmetic or\n' + ' bitwise unary operator on its right, that is, ' '"2**-1" is "0.5".\n', 'pass': 'The "pass" statement\n' '********************\n' @@ -7212,9 +7240,11 @@ '\n' ' New in version 3.4.\n' '\n' - 'Note: Slicing is done exclusively with the following three ' - 'methods.\n' - ' A call like\n' + 'Note:\n' + '\n' + ' Slicing is done exclusively with the following three ' + 'methods. A\n' + ' call like\n' '\n' ' a[1:2] = b\n' '\n' @@ -7245,7 +7275,9 @@ 'the\n' ' container), "KeyError" should be raised.\n' '\n' - ' Note: "for" loops expect that an "IndexError" will be ' + ' Note:\n' + '\n' + ' "for" loops expect that an "IndexError" will be ' 'raised for\n' ' illegal indexes to allow proper detection of the end ' 'of the\n' @@ -7366,11 +7398,13 @@ 'A left shift by *n* bits is defined as multiplication with ' '"pow(2,n)".\n' '\n' - 'Note: In the current implementation, the right-hand operand is\n' - ' required to be at most "sys.maxsize". If the right-hand ' - 'operand is\n' - ' larger than "sys.maxsize" an "OverflowError" exception is ' - 'raised.\n', + 'Note:\n' + '\n' + ' In the current implementation, the right-hand operand is ' + 'required to\n' + ' be at most "sys.maxsize". If the right-hand operand is larger ' + 'than\n' + ' "sys.maxsize" an "OverflowError" exception is raised.\n', 'slicings': 'Slicings\n' '********\n' '\n' @@ -7487,26 +7521,26 @@ '-[ Footnotes ]-\n' '\n' '[1] Additional information on these special methods may be ' - 'found\n' - ' in the Python Reference Manual (Basic customization).\n' + 'found in\n' + ' the Python Reference Manual (Basic customization).\n' '\n' '[2] As a consequence, the list "[1, 2]" is considered equal ' - 'to\n' - ' "[1.0, 2.0]", and similarly for tuples.\n' + 'to "[1.0,\n' + ' 2.0]", and similarly for tuples.\n' '\n' '[3] They must have since the parser can?t tell the type of ' 'the\n' ' operands.\n' '\n' '[4] Cased characters are those with general category ' - 'property\n' - ' being one of ?Lu? (Letter, uppercase), ?Ll? (Letter, ' - 'lowercase),\n' - ' or ?Lt? (Letter, titlecase).\n' - '\n' - '[5] To format only a tuple you should therefore provide a\n' - ' singleton tuple whose only element is the tuple to be ' - 'formatted.\n', + 'property being\n' + ' one of ?Lu? (Letter, uppercase), ?Ll? (Letter, ' + 'lowercase), or ?Lt?\n' + ' (Letter, titlecase).\n' + '\n' + '[5] To format only a tuple you should therefore provide a ' + 'singleton\n' + ' tuple whose only element is the tuple to be formatted.\n', 'specialnames': 'Special method names\n' '********************\n' '\n' @@ -7649,7 +7683,9 @@ 'for\n' ' objects that still exist when the interpreter exits.\n' '\n' - ' Note: "del x" doesn?t directly call "x.__del__()" ? the ' + ' Note:\n' + '\n' + ' "del x" doesn?t directly call "x.__del__()" ? the ' 'former\n' ' decrements the reference count for "x" by one, and the ' 'latter is\n' @@ -7673,12 +7709,15 @@ '\n' ' See also: Documentation for the "gc" module.\n' '\n' - ' Warning: Due to the precarious circumstances under which\n' - ' "__del__()" methods are invoked, exceptions that occur ' - 'during\n' - ' their execution are ignored, and a warning is printed ' - 'to\n' - ' "sys.stderr" instead. 
In particular:\n' + ' Warning:\n' + '\n' + ' Due to the precarious circumstances under which ' + '"__del__()"\n' + ' methods are invoked, exceptions that occur during their ' + 'execution\n' + ' are ignored, and a warning is printed to "sys.stderr" ' + 'instead.\n' + ' In particular:\n' '\n' ' * "__del__()" can be invoked when arbitrary code is ' 'being\n' @@ -7691,22 +7730,20 @@ ' that gets interrupted to execute "__del__()".\n' '\n' ' * "__del__()" can be executed during interpreter ' - 'shutdown. As\n' - ' a consequence, the global variables it needs to ' - 'access\n' - ' (including other modules) may already have been ' - 'deleted or set\n' - ' to "None". Python guarantees that globals whose name ' - 'begins\n' - ' with a single underscore are deleted from their ' - 'module before\n' - ' other globals are deleted; if no other references to ' - 'such\n' - ' globals exist, this may help in assuring that ' - 'imported modules\n' - ' are still available at the time when the "__del__()" ' - 'method is\n' - ' called.\n' + 'shutdown. As a\n' + ' consequence, the global variables it needs to access ' + '(including\n' + ' other modules) may already have been deleted or set ' + 'to "None".\n' + ' Python guarantees that globals whose name begins with ' + 'a single\n' + ' underscore are deleted from their module before other ' + 'globals\n' + ' are deleted; if no other references to such globals ' + 'exist, this\n' + ' may help in assuring that imported modules are still ' + 'available\n' + ' at the time when the "__del__()" method is called.\n' '\n' 'object.__repr__(self)\n' '\n' @@ -7877,19 +7914,21 @@ ' def __hash__(self):\n' ' return hash((self.name, self.nick, self.color))\n' '\n' - ' Note: "hash()" truncates the value returned from an ' - 'object?s\n' - ' custom "__hash__()" method to the size of a ' - '"Py_ssize_t". This\n' - ' is typically 8 bytes on 64-bit builds and 4 bytes on ' - '32-bit\n' - ' builds. If an object?s "__hash__()" must interoperate ' - 'on builds\n' - ' of different bit sizes, be sure to check the width on ' - 'all\n' - ' supported builds. An easy way to do this is with ' - '"python -c\n' - ' "import sys; print(sys.hash_info.width)"".\n' + ' Note:\n' + '\n' + ' "hash()" truncates the value returned from an object?s ' + 'custom\n' + ' "__hash__()" method to the size of a "Py_ssize_t". ' + 'This is\n' + ' typically 8 bytes on 64-bit builds and 4 bytes on ' + '32-bit builds.\n' + ' If an object?s "__hash__()" must interoperate on ' + 'builds of\n' + ' different bit sizes, be sure to check the width on all ' + 'supported\n' + ' builds. An easy way to do this is with "python -c ' + '"import sys;\n' + ' print(sys.hash_info.width)"".\n' '\n' ' If a class does not define an "__eq__()" method it should ' 'not\n' @@ -7945,10 +7984,12 @@ ' hashable by an "isinstance(obj, collections.Hashable)" ' 'call.\n' '\n' - ' Note: By default, the "__hash__()" values of str, bytes ' - 'and\n' - ' datetime objects are ?salted? with an unpredictable ' - 'random value.\n' + ' Note:\n' + '\n' + ' By default, the "__hash__()" values of str, bytes and ' + 'datetime\n' + ' objects are ?salted? 
with an unpredictable random ' + 'value.\n' ' Although they remain constant within an individual ' 'Python\n' ' process, they are not predictable between repeated ' @@ -8048,11 +8089,13 @@ 'needs, for\n' ' example, "object.__getattribute__(self, name)".\n' '\n' - ' Note: This method may still be bypassed when looking up ' - 'special\n' - ' methods as the result of implicit invocation via ' - 'language syntax\n' - ' or built-in functions. See Special method lookup.\n' + ' Note:\n' + '\n' + ' This method may still be bypassed when looking up ' + 'special methods\n' + ' as the result of implicit invocation via language ' + 'syntax or\n' + ' built-in functions. See Special method lookup.\n' '\n' 'object.__setattr__(self, name, value)\n' '\n' @@ -8110,8 +8153,10 @@ '\n' ' sys.modules[__name__].__class__ = VerboseModule\n' '\n' - 'Note: Setting module "__class__" only affects lookups made ' - 'using the\n' + 'Note:\n' + '\n' + ' Setting module "__class__" only affects lookups made using ' + 'the\n' ' attribute access syntax ? directly accessing the module ' 'globals\n' ' (whether by code within the module, or via a reference to ' @@ -8316,10 +8361,9 @@ '~~~~~~~~~~~~~~~~~~~~~~~~~~\n' '\n' '* When inheriting from a class without *__slots__*, the ' - '*__dict__*\n' - ' and *__weakref__* attribute of the instances will always ' - 'be\n' - ' accessible.\n' + '*__dict__* and\n' + ' *__weakref__* attribute of the instances will always be ' + 'accessible.\n' '\n' '* Without a *__dict__* variable, instances cannot be ' 'assigned new\n' @@ -8333,13 +8377,12 @@ ' declaration.\n' '\n' '* Without a *__weakref__* variable for each instance, ' - 'classes\n' - ' defining *__slots__* do not support weak references to ' - 'its\n' - ' instances. If weak reference support is needed, then add\n' - ' "\'__weakref__\'" to the sequence of strings in the ' - '*__slots__*\n' - ' declaration.\n' + 'classes defining\n' + ' *__slots__* do not support weak references to its ' + 'instances. If weak\n' + ' reference support is needed, then add "\'__weakref__\'" to ' + 'the\n' + ' sequence of strings in the *__slots__* declaration.\n' '\n' '* *__slots__* are implemented at the class level by ' 'creating\n' @@ -8352,23 +8395,22 @@ ' attribute would overwrite the descriptor assignment.\n' '\n' '* The action of a *__slots__* declaration is not limited to ' - 'the\n' - ' class where it is defined. *__slots__* declared in ' - 'parents are\n' - ' available in child classes. However, child subclasses will ' - 'get a\n' - ' *__dict__* and *__weakref__* unless they also define ' - '*__slots__*\n' - ' (which should only contain names of any *additional* ' - 'slots).\n' + 'the class\n' + ' where it is defined. *__slots__* declared in parents are ' + 'available\n' + ' in child classes. However, child subclasses will get a ' + '*__dict__*\n' + ' and *__weakref__* unless they also define *__slots__* ' + '(which should\n' + ' only contain names of any *additional* slots).\n' '\n' '* If a class defines a slot also defined in a base class, ' - 'the\n' - ' instance variable defined by the base class slot is ' - 'inaccessible\n' - ' (except by retrieving its descriptor directly from the ' - 'base class).\n' - ' This renders the meaning of the program undefined. In the ' + 'the instance\n' + ' variable defined by the base class slot is inaccessible ' + '(except by\n' + ' retrieving its descriptor directly from the base class). ' + 'This\n' + ' renders the meaning of the program undefined. 
In the ' 'future, a\n' ' check may be added to prevent this.\n' '\n' @@ -8378,9 +8420,9 @@ 'and "tuple".\n' '\n' '* Any non-string iterable may be assigned to *__slots__*. ' - 'Mappings\n' - ' may also be used; however, in the future, special meaning ' - 'may be\n' + 'Mappings may\n' + ' also be used; however, in the future, special meaning may ' + 'be\n' ' assigned to the values corresponding to each key.\n' '\n' '* *__class__* assignment works only if both classes have the ' @@ -8444,9 +8486,11 @@ 'does nothing,\n' ' but raises an error if it is called with any arguments.\n' '\n' - ' Note: The metaclass hint "metaclass" is consumed by the ' - 'rest of\n' - ' the type machinery, and is never passed to ' + ' Note:\n' + '\n' + ' The metaclass hint "metaclass" is consumed by the rest ' + 'of the\n' + ' type machinery, and is never passed to ' '"__init_subclass__"\n' ' implementations. The actual metaclass (rather than the ' 'explicit\n' @@ -8832,9 +8876,11 @@ '\n' ' New in version 3.4.\n' '\n' - 'Note: Slicing is done exclusively with the following three ' - 'methods.\n' - ' A call like\n' + 'Note:\n' + '\n' + ' Slicing is done exclusively with the following three ' + 'methods. A\n' + ' call like\n' '\n' ' a[1:2] = b\n' '\n' @@ -8865,8 +8911,10 @@ 'the\n' ' container), "KeyError" should be raised.\n' '\n' - ' Note: "for" loops expect that an "IndexError" will be ' - 'raised for\n' + ' Note:\n' + '\n' + ' "for" loops expect that an "IndexError" will be raised ' + 'for\n' ' illegal indexes to allow proper detection of the end of ' 'the\n' ' sequence.\n' @@ -9056,15 +9104,17 @@ '"__rpow__()" (the\n' ' coercion rules would become too complicated).\n' '\n' - ' Note: If the right operand?s type is a subclass of the ' - 'left\n' - ' operand?s type and that subclass provides the reflected ' - 'method\n' - ' for the operation, this method will be called before ' - 'the left\n' - ' operand?s non-reflected method. This behavior allows ' - 'subclasses\n' - ' to override their ancestors? operations.\n' + ' Note:\n' + '\n' + ' If the right operand?s type is a subclass of the left ' + 'operand?s\n' + ' type and that subclass provides the reflected method ' + 'for the\n' + ' operation, this method will be called before the left ' + 'operand?s\n' + ' non-reflected method. This behavior allows subclasses ' + 'to\n' + ' override their ancestors? operations.\n' '\n' 'object.__iadd__(self, other)\n' 'object.__isub__(self, other)\n' @@ -9138,8 +9188,9 @@ 'numeric\n' ' object is an integer type. Must return an integer.\n' '\n' - ' Note: In order to have a coherent integer type class, ' - 'when\n' + ' Note:\n' + '\n' + ' In order to have a coherent integer type class, when\n' ' "__index__()" is defined "__int__()" should also be ' 'defined, and\n' ' both should return the same value.\n' @@ -9463,11 +9514,13 @@ '"-1" if\n' ' *sub* is not found.\n' '\n' - ' Note: The "find()" method should be used only if you ' - 'need to know\n' - ' the position of *sub*. To check if *sub* is a ' - 'substring or not,\n' - ' use the "in" operator:\n' + ' Note:\n' + '\n' + ' The "find()" method should be used only if you need ' + 'to know the\n' + ' position of *sub*. 
To check if *sub* is a substring ' + 'or not, use\n' + ' the "in" operator:\n' '\n' " >>> 'Py' in 'Python'\n" ' True\n' @@ -9496,8 +9549,9 @@ ' formatting options that can be specified in format ' 'strings.\n' '\n' - ' Note: When formatting a number ("int", "float", ' - '"complex",\n' + ' Note:\n' + '\n' + ' When formatting a number ("int", "float", "complex",\n' ' "decimal.Decimal" and subclasses) with the "n" type ' '(ex:\n' ' "\'{:n}\'.format(1234)"), the function temporarily ' @@ -10321,17 +10375,20 @@ '\n' '2. Unlike in Standard C, exactly two hex digits are required.\n' '\n' - '3. In a bytes literal, hexadecimal and octal escapes denote the\n' - ' byte with the given value. In a string literal, these escapes\n' - ' denote a Unicode character with the given value.\n' + '3. In a bytes literal, hexadecimal and octal escapes denote the ' + 'byte\n' + ' with the given value. In a string literal, these escapes ' + 'denote a\n' + ' Unicode character with the given value.\n' '\n' '4. Changed in version 3.3: Support for name aliases [1] has been\n' ' added.\n' '\n' '5. Exactly four hex digits are required.\n' '\n' - '6. Any Unicode character can be encoded this way. Exactly eight\n' - ' hex digits are required.\n' + '6. Any Unicode character can be encoded this way. Exactly eight ' + 'hex\n' + ' digits are required.\n' '\n' 'Unlike Standard C, all unrecognized escape sequences are left in ' 'the\n' @@ -11873,9 +11930,11 @@ 'raise\n' ' "TypeError".\n' '\n' - 'See also: "types.MappingProxyType" can be used to create a ' - 'read-only\n' - ' view of a "dict".\n' + 'See also:\n' + '\n' + ' "types.MappingProxyType" can be used to create a read-only ' + 'view of a\n' + ' "dict".\n' '\n' '\n' 'Dictionary view objects\n' @@ -12253,13 +12312,14 @@ '"None", it\n' ' is treated like "1".\n' '\n' - '6. Concatenating immutable sequences always results in a new\n' - ' object. This means that building up a sequence by repeated\n' - ' concatenation will have a quadratic runtime cost in the ' - 'total\n' - ' sequence length. To get a linear runtime cost, you must ' - 'switch to\n' - ' one of the alternatives below:\n' + '6. Concatenating immutable sequences always results in a new ' + 'object.\n' + ' This means that building up a sequence by repeated ' + 'concatenation\n' + ' will have a quadratic runtime cost in the total sequence ' + 'length.\n' + ' To get a linear runtime cost, you must switch to one of the\n' + ' alternatives below:\n' '\n' ' * if concatenating "str" objects, you can build a list and ' 'use\n' @@ -12277,24 +12337,25 @@ ' * for other types, investigate the relevant class ' 'documentation\n' '\n' - '7. Some sequence types (such as "range") only support item\n' - ' sequences that follow specific patterns, and hence don?t ' - 'support\n' - ' sequence concatenation or repetition.\n' - '\n' - '8. "index" raises "ValueError" when *x* is not found in *s*. ' - 'Not\n' - ' all implementations support passing the additional arguments ' - '*i*\n' - ' and *j*. These arguments allow efficient searching of ' - 'subsections\n' - ' of the sequence. Passing the extra arguments is roughly ' - 'equivalent\n' - ' to using "s[i:j].index(x)", only without copying any data and ' - 'with\n' - ' the returned index being relative to the start of the ' + '7. Some sequence types (such as "range") only support item ' + 'sequences\n' + ' that follow specific patterns, and hence don?t support ' 'sequence\n' - ' rather than the start of the slice.\n' + ' concatenation or repetition.\n' + '\n' + '8. 
"index" raises "ValueError" when *x* is not found in *s*. Not ' + 'all\n' + ' implementations support passing the additional arguments *i* ' + 'and\n' + ' *j*. These arguments allow efficient searching of subsections ' + 'of\n' + ' the sequence. Passing the extra arguments is roughly ' + 'equivalent to\n' + ' using "s[i:j].index(x)", only without copying any data and ' + 'with the\n' + ' returned index being relative to the start of the sequence ' + 'rather\n' + ' than the start of the slice.\n' '\n' '\n' 'Immutable Sequence Types\n' @@ -12422,17 +12483,17 @@ '1. *t* must have the same length as the slice it is replacing.\n' '\n' '2. The optional argument *i* defaults to "-1", so that by ' - 'default\n' - ' the last item is removed and returned.\n' + 'default the\n' + ' last item is removed and returned.\n' '\n' '3. "remove" raises "ValueError" when *x* is not found in *s*.\n' '\n' - '4. The "reverse()" method modifies the sequence in place for\n' - ' economy of space when reversing a large sequence. To remind ' - 'users\n' - ' that it operates by side effect, it does not return the ' - 'reversed\n' - ' sequence.\n' + '4. The "reverse()" method modifies the sequence in place for ' + 'economy\n' + ' of space when reversing a large sequence. To remind users ' + 'that it\n' + ' operates by side effect, it does not return the reversed ' + 'sequence.\n' '\n' '5. "clear()" and "copy()" are included for consistency with the\n' ' interfaces of mutable containers that don?t support slicing\n' @@ -12465,9 +12526,9 @@ ' * Using a pair of square brackets to denote the empty list: ' '"[]"\n' '\n' - ' * Using square brackets, separating items with commas: ' - '"[a]",\n' - ' "[a, b, c]"\n' + ' * Using square brackets, separating items with commas: "[a]", ' + '"[a,\n' + ' b, c]"\n' '\n' ' * Using a list comprehension: "[x for x in iterable]"\n' '\n' @@ -12766,9 +12827,9 @@ '\n' 'See also:\n' '\n' - ' * The linspace recipe shows how to implement a lazy version ' - 'of\n' - ' range suitable for floating point applications.\n', + ' * The linspace recipe shows how to implement a lazy version of ' + 'range\n' + ' suitable for floating point applications.\n', 'typesseq-mutable': 'Mutable Sequence Types\n' '**********************\n' '\n' @@ -12879,19 +12940,18 @@ 'replacing.\n' '\n' '2. The optional argument *i* defaults to "-1", so that ' - 'by default\n' - ' the last item is removed and returned.\n' + 'by default the\n' + ' last item is removed and returned.\n' '\n' '3. "remove" raises "ValueError" when *x* is not found in ' '*s*.\n' '\n' '4. The "reverse()" method modifies the sequence in place ' - 'for\n' - ' economy of space when reversing a large sequence. To ' - 'remind users\n' - ' that it operates by side effect, it does not return ' - 'the reversed\n' - ' sequence.\n' + 'for economy\n' + ' of space when reversing a large sequence. To remind ' + 'users that it\n' + ' operates by side effect, it does not return the ' + 'reversed sequence.\n' '\n' '5. "clear()" and "copy()" are included for consistency ' 'with the\n' @@ -12970,19 +13030,23 @@ 'The execution of the "with" statement with one ?item? proceeds as\n' 'follows:\n' '\n' - '1. The context expression (the expression given in the "with_item")\n' - ' is evaluated to obtain a context manager.\n' + '1. The context expression (the expression given in the "with_item") ' + 'is\n' + ' evaluated to obtain a context manager.\n' '\n' '2. The context manager?s "__exit__()" is loaded for later use.\n' '\n' '3. 
The context manager?s "__enter__()" method is invoked.\n' '\n' - '4. If a target was included in the "with" statement, the return\n' - ' value from "__enter__()" is assigned to it.\n' + '4. If a target was included in the "with" statement, the return ' + 'value\n' + ' from "__enter__()" is assigned to it.\n' + '\n' + ' Note:\n' '\n' - ' Note: The "with" statement guarantees that if the "__enter__()"\n' - ' method returns without an error, then "__exit__()" will always ' - 'be\n' + ' The "with" statement guarantees that if the "__enter__()" ' + 'method\n' + ' returns without an error, then "__exit__()" will always be\n' ' called. Thus, if an error occurs during the assignment to the\n' ' target list, it will be treated the same as an error occurring\n' ' within the suite would be. See step 6 below.\n' diff --git a/Misc/NEWS.d/3.6.11rc1.rst b/Misc/NEWS.d/3.6.11rc1.rst new file mode 100644 index 0000000000000..a52d43f370a89 --- /dev/null +++ b/Misc/NEWS.d/3.6.11rc1.rst @@ -0,0 +1,72 @@ +.. bpo: 39073 +.. date: 2020-03-15-01-28-36 +.. nonce: 6Szd3i +.. release date: 2020-06-17 +.. section: Security + +Disallow CR or LF in email.headerregistry.Address arguments to guard against +header injection attacks. + +.. + +.. bpo: 38576 +.. date: 2020-03-14-14-57-44 +.. nonce: OowwQn +.. section: Security + +Disallow control characters in hostnames in http.client, addressing +CVE-2019-18348. Such potentially malicious header injection URLs now cause a +InvalidURL to be raised. + +.. + +.. bpo: 39503 +.. date: 2020-01-30-16-15-29 +.. nonce: B299Yq +.. section: Security + +CVE-2020-8492: The :class:`~urllib.request.AbstractBasicAuthHandler` class +of the :mod:`urllib.request` module uses an inefficient regular expression +which can be exploited by an attacker to cause a denial of service. Fix the +regex to prevent the catastrophic backtracking. Vulnerability reported by +Ben Caller and Matt Schwager. + +.. + +.. bpo: 39401 +.. date: 2020-01-28-20-54-09 +.. nonce: he7h_A +.. section: Security + +Avoid unsafe load of ``api-ms-win-core-path-l1-1-0.dll`` at startup on +Windows 7. + +.. + +.. bpo: 39510 +.. date: 2020-02-04-10-27-41 +.. nonce: PMIh-f +.. section: Core and Builtins + +Fix segfault in ``readinto()`` method on closed BufferedReader. + +.. + +.. bpo: 39421 +.. date: 2020-01-22-15-53-37 +.. nonce: O3nG7u +.. section: Core and Builtins + +Fix possible crashes when operating with the functions in the :mod:`heapq` +module and custom comparison operators. + +.. + +.. bpo: 39503 +.. date: 2020-03-25-16-02-16 +.. nonce: YmMbYn +.. section: Library + +:class:`~urllib.request.AbstractBasicAuthHandler` of :mod:`urllib.request` +now parses all WWW-Authenticate HTTP headers and accepts multiple challenges +per header: use the realm of the first Basic challenge. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-01-22-15-53-37.bpo-39421.O3nG7u.rst b/Misc/NEWS.d/next/Core and Builtins/2020-01-22-15-53-37.bpo-39421.O3nG7u.rst deleted file mode 100644 index bae008150ee12..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-01-22-15-53-37.bpo-39421.O3nG7u.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix possible crashes when operating with the functions in the :mod:`heapq` -module and custom comparison operators. 
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-02-04-10-27-41.bpo-39510.PMIh-f.rst b/Misc/NEWS.d/next/Core and Builtins/2020-02-04-10-27-41.bpo-39510.PMIh-f.rst deleted file mode 100644 index 9a38e4ab76228..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-02-04-10-27-41.bpo-39510.PMIh-f.rst +++ /dev/null @@ -1 +0,0 @@ -Fix segfault in ``readinto()`` method on closed BufferedReader. diff --git a/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst b/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst deleted file mode 100644 index be80ce79d91ed..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst +++ /dev/null @@ -1,3 +0,0 @@ -:class:`~urllib.request.AbstractBasicAuthHandler` of :mod:`urllib.request` -now parses all WWW-Authenticate HTTP headers and accepts multiple challenges -per header: use the realm of the first Basic challenge. diff --git a/Misc/NEWS.d/next/Security/2020-01-28-20-54-09.bpo-39401.he7h_A.rst b/Misc/NEWS.d/next/Security/2020-01-28-20-54-09.bpo-39401.he7h_A.rst deleted file mode 100644 index 5071e126b70d0..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-01-28-20-54-09.bpo-39401.he7h_A.rst +++ /dev/null @@ -1 +0,0 @@ -Avoid unsafe load of ``api-ms-win-core-path-l1-1-0.dll`` at startup on Windows 7. diff --git a/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst b/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst deleted file mode 100644 index 9f2800581ca5e..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst +++ /dev/null @@ -1,5 +0,0 @@ -CVE-2020-8492: The :class:`~urllib.request.AbstractBasicAuthHandler` class of the -:mod:`urllib.request` module uses an inefficient regular expression which can -be exploited by an attacker to cause a denial of service. Fix the regex to -prevent the catastrophic backtracking. Vulnerability reported by Ben Caller -and Matt Schwager. diff --git a/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst b/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst deleted file mode 100644 index 34b8af28988fa..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst +++ /dev/null @@ -1 +0,0 @@ -Disallow control characters in hostnames in http.client, addressing CVE-2019-18348. Such potentially malicious header injection URLs now cause a InvalidURL to be raised. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst deleted file mode 100644 index 6c9447b897bf6..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst +++ /dev/null @@ -1 +0,0 @@ -Disallow CR or LF in email.headerregistry.Address arguments to guard against header injection attacks. diff --git a/README.rst b/README.rst index 81d457c5640b5..8e69337d4032e 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.6.10+ -============================== +This is Python version 3.6.11 candidate 1 +========================================= ..
image:: https://travis-ci.org/python/cpython.svg?branch=3.6 :alt: CPython build status on Travis CI From webhook-mailer at python.org Thu Jun 18 08:53:27 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 18 Jun 2020 12:53:27 -0000 Subject: [Python-checkins] bpo-38377: Add support.skip_if_broken_multiprocessing_synchronize() (GH-20944) Message-ID: https://github.com/python/cpython/commit/ddbeb2f3e02a510c5784ffd74c5e09e8c70b5881 commit: ddbeb2f3e02a510c5784ffd74c5e09e8c70b5881 branch: master author: Victor Stinner committer: GitHub date: 2020-06-18T14:53:19+02:00 summary: bpo-38377: Add support.skip_if_broken_multiprocessing_synchronize() (GH-20944) On Linux, skip tests using multiprocessing if the current user cannot create a file in /dev/shm/ directory. Add the skip_if_broken_multiprocessing_synchronize() function to the test.support module. files: A Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst M Doc/library/test.rst M Lib/test/_test_multiprocessing.py M Lib/test/support/__init__.py M Lib/test/test_asyncio/test_events.py M Lib/test/test_concurrent_futures.py M Lib/test/test_logging.py M Lib/test/test_multiprocessing_main_handling.py M Lib/test/test_venv.py diff --git a/Doc/library/test.rst b/Doc/library/test.rst index b39b601fb64f6..cd05ef07b4a21 100644 --- a/Doc/library/test.rst +++ b/Doc/library/test.rst @@ -919,6 +919,14 @@ The :mod:`test.support` module defines the following functions: .. versionadded:: 3.6 +.. function:: skip_if_broken_multiprocessing_synchronize() + + Skip tests if the :mod:`multiprocessing.synchronize` module is missing, if + there is no available semaphore implementation, or if creating a lock raises + an :exc:`OSError`. + + .. versionadded:: 3.10 + The :mod:`test.support` module defines the following classes: diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index d01a6680e409c..444e234509c27 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -34,7 +34,7 @@ # Skip tests if _multiprocessing wasn't built. _multiprocessing = test.support.import_module('_multiprocessing') # Skip tests if sem_open implementation is broken. -test.support.import_module('multiprocessing.synchronize') +support.skip_if_broken_multiprocessing_synchronize() import threading import multiprocessing.connection diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index da63d9281b107..d9dbdc13008dc 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1957,3 +1957,25 @@ def wait_process(pid, *, exitcode, timeout=None): # sanity check: it should not fail in practice if pid2 != pid: raise AssertionError(f"pid {pid2} != pid {pid}") + +def skip_if_broken_multiprocessing_synchronize(): + """ + Skip tests if the multiprocessing.synchronize module is missing, if there + is no available semaphore implementation, or if creating a lock raises an + OSError. + """ + + # Skip tests if the _multiprocessing extension is missing. + import_module('_multiprocessing') + + # Skip tests if there is no available semaphore implementation: + # multiprocessing.synchronize requires _multiprocessing.SemLock. + synchronize = import_module('multiprocessing.synchronize') + + try: + # bpo-38377: On Linux, creating a semaphore is the current user + # does not have the permission to create a file in /dev/shm. + # Create a semaphore to check permissions. 
+ synchronize.Lock(ctx=None) + except OSError as exc: + raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index e7324d2e4811b..ef9d6fc48e9f8 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -2673,10 +2673,10 @@ def tearDown(self): if sys.platform != 'win32': def test_get_event_loop_new_process(self): - # Issue bpo-32126: The multiprocessing module used by + # bpo-32126: The multiprocessing module used by # ProcessPoolExecutor is not functional when the # multiprocessing.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() async def main(): pool = concurrent.futures.ProcessPoolExecutor() diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index 0ed75e6098a80..7da967ea6ced5 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -4,7 +4,7 @@ # Skip tests if _multiprocessing wasn't built. support.import_module('_multiprocessing') # Skip tests if sem_open implementation is broken. -support.import_module('multiprocessing.synchronize') +support.skip_if_broken_multiprocessing_synchronize() from test.support import hashlib_helper from test.support.script_helper import assert_python_ok diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 275ce2e45f169..e719d264a9191 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -3630,9 +3630,9 @@ def test_handle_called_with_queue_queue(self, mock_handle): @patch.object(logging.handlers.QueueListener, 'handle') def test_handle_called_with_mp_queue(self, mock_handle): - # Issue 28668: The multiprocessing (mp) module is not functional + # bpo-28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() for i in range(self.repeat): log_queue = multiprocessing.Queue() self.setup_and_log(log_queue, '%s_%s' % (self.id(), i)) @@ -3656,9 +3656,9 @@ def test_no_messages_in_queue_after_stop(self): indicates that messages were not registered on the queue until _after_ the QueueListener stopped. """ - # Issue 28668: The multiprocessing (mp) module is not functional + # bpo-28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() for i in range(self.repeat): queue = multiprocessing.Queue() self.setup_and_log(queue, '%s_%s' %(self.id(), i)) diff --git a/Lib/test/test_multiprocessing_main_handling.py b/Lib/test/test_multiprocessing_main_handling.py index b6abfcc7e283d..be1ff10e03a55 100644 --- a/Lib/test/test_multiprocessing_main_handling.py +++ b/Lib/test/test_multiprocessing_main_handling.py @@ -23,7 +23,7 @@ AVAILABLE_START_METHODS = set(multiprocessing.get_all_start_methods()) # Issue #22332: Skip tests if sem_open implementation is broken. 
-support.import_module('multiprocessing.synchronize') +support.skip_if_broken_multiprocessing_synchronize() verbose = support.verbose diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index ef6d7bd5ad7da..d3191ed7b9955 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -16,7 +16,8 @@ import tempfile from test.support import (captured_stdout, captured_stderr, requires_zlib, can_symlink, EnvironmentVarGuard, rmtree, - import_module) + import_module, + skip_if_broken_multiprocessing_synchronize) import unittest import venv from unittest.mock import patch @@ -357,10 +358,11 @@ def test_multiprocessing(self): """ Test that the multiprocessing is able to spawn. """ - # Issue bpo-36342: Instantiation of a Pool object imports the + # bpo-36342: Instantiation of a Pool object imports the # multiprocessing.synchronize module. Skip the test if this module # cannot be imported. - import_module('multiprocessing.synchronize') + skip_if_broken_multiprocessing_synchronize() + rmtree(self.env_dir) self.run_with_capture(venv.create, self.env_dir) envpy = os.path.join(os.path.realpath(self.env_dir), diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst b/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst new file mode 100644 index 0000000000000..11a30761d36c9 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst @@ -0,0 +1,4 @@ +On Linux, skip tests using multiprocessing if the current user cannot create +a file in ``/dev/shm/`` directory. Add the +:func:`~test.support.skip_if_broken_multiprocessing_synchronize` function to +the :mod:`test.support` module. From webhook-mailer at python.org Thu Jun 18 10:19:03 2020 From: webhook-mailer at python.org (Bar Harel) Date: Thu, 18 Jun 2020 14:19:03 -0000 Subject: [Python-checkins] bpo-40884: Added defaults parameter for logging.Formatter (GH-20668) Message-ID: https://github.com/python/cpython/commit/8f192d12af82c4dc40730bf59814f6a68f68f950 commit: 8f192d12af82c4dc40730bf59814f6a68f68f950 branch: master author: Bar Harel committer: GitHub date: 2020-06-18T07:18:58-07:00 summary: bpo-40884: Added defaults parameter for logging.Formatter (GH-20668) Docs and tests are underway. Automerge-Triggered-By: @vsajip files: A Misc/NEWS.d/next/Library/2020-06-06-02-42-26.bpo-40884.n7fOwS.rst M Doc/library/logging.rst M Lib/logging/__init__.py M Lib/test/test_logging.py diff --git a/Doc/library/logging.rst b/Doc/library/logging.rst index 7267f812cc192..3ff67f76cc3c5 100644 --- a/Doc/library/logging.rst +++ b/Doc/library/logging.rst @@ -529,7 +529,8 @@ The useful mapping keys in a :class:`LogRecord` are given in the section on :ref:`logrecord-attributes`. -.. class:: Formatter(fmt=None, datefmt=None, style='%', validate=True) +.. class:: Formatter(fmt=None, datefmt=None, style='%', validate=True, *, + defaults=None) Returns a new instance of the :class:`Formatter` class. The instance is initialized with a format string for the message as a whole, as well as a @@ -545,6 +546,10 @@ The useful mapping keys in a :class:`LogRecord` are given in the section on :ref:`formatting-styles` for more information on using {- and $-formatting for log messages. + The *defaults* parameter can be a dictionary with default values to use in + custom fields. For example: + ``logging.Formatter('%(ip)s %(message)s', defaults={"ip": None})`` + .. versionchanged:: 3.2 The *style* parameter was added. 
@@ -553,6 +558,9 @@ The useful mapping keys in a :class:`LogRecord` are given in the section on will raise a ``ValueError``. For example: ``logging.Formatter('%(asctime)s - %(message)s', style='{')``. + .. versionchanged:: 3.10 + The *defaults* parameter was added. + .. method:: format(record) The record's attribute dictionary is used as the operand to a string diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 1c446fd421650..94361ca75f4f3 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -411,8 +411,9 @@ class PercentStyle(object): asctime_search = '%(asctime)' validation_pattern = re.compile(r'%\(\w+\)[#0+ -]*(\*|\d+)?(\.(\*|\d+))?[diouxefgcrsa%]', re.I) - def __init__(self, fmt): + def __init__(self, fmt, *, defaults=None): self._fmt = fmt or self.default_format + self._defaults = defaults def usesTime(self): return self._fmt.find(self.asctime_search) >= 0 @@ -423,7 +424,11 @@ def validate(self): raise ValueError("Invalid format '%s' for '%s' style" % (self._fmt, self.default_format[0])) def _format(self, record): - return self._fmt % record.__dict__ + if defaults := self._defaults: + values = defaults | record.__dict__ + else: + values = record.__dict__ + return self._fmt % values def format(self, record): try: @@ -441,7 +446,11 @@ class StrFormatStyle(PercentStyle): field_spec = re.compile(r'^(\d+|\w+)(\.\w+|\[[^]]+\])*$') def _format(self, record): - return self._fmt.format(**record.__dict__) + if defaults := self._defaults: + values = defaults | record.__dict__ + else: + values = record.__dict__ + return self._fmt.format(**values) def validate(self): """Validate the input format, ensure it is the correct string formatting style""" @@ -467,8 +476,8 @@ class StringTemplateStyle(PercentStyle): asctime_format = '${asctime}' asctime_search = '${asctime}' - def __init__(self, fmt): - self._fmt = fmt or self.default_format + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) self._tpl = Template(self._fmt) def usesTime(self): @@ -490,7 +499,11 @@ def validate(self): raise ValueError('invalid format: no fields') def _format(self, record): - return self._tpl.substitute(**record.__dict__) + if defaults := self._defaults: + values = defaults | record.__dict__ + else: + values = record.__dict__ + return self._tpl.substitute(**values) BASIC_FORMAT = "%(levelname)s:%(name)s:%(message)s" @@ -546,7 +559,8 @@ class Formatter(object): converter = time.localtime - def __init__(self, fmt=None, datefmt=None, style='%', validate=True): + def __init__(self, fmt=None, datefmt=None, style='%', validate=True, *, + defaults=None): """ Initialize the formatter with specified format strings. 
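For illustration only (this snippet is not part of the committed diff), a minimal sketch of how the new ``defaults`` mapping is intended to behave once the patch above is applied; the ``ip`` field name is just an assumed example:

    import logging

    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter("%(ip)s %(message)s", defaults={"ip": "-"})
    )
    logging.basicConfig(level=logging.INFO, handlers=[handler])

    # A record that carries no custom "ip" attribute falls back to the default:
    logging.info("no client address")                   # "- no client address"
    # A value supplied via ``extra`` overrides the default for that record:
    logging.info("request", extra={"ip": "127.0.0.1"})  # "127.0.0.1 request"
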
@@ -565,7 +579,7 @@ def __init__(self, fmt=None, datefmt=None, style='%', validate=True): if style not in _STYLES: raise ValueError('Style must be one of: %s' % ','.join( _STYLES.keys())) - self._style = _STYLES[style][0](fmt) + self._style = _STYLES[style][0](fmt, defaults=defaults) if validate: self._style.validate() diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index e719d264a9191..2ae00b6e3b4e9 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -3710,6 +3710,9 @@ def setUp(self): 'args': (2, 'placeholders'), } self.variants = { + 'custom': { + 'custom': 1234 + } } def get_record(self, name=None): @@ -3926,6 +3929,26 @@ def test_format_validate(self): ) self.assertRaises(ValueError, logging.Formatter, '${asctime', style='$') + def test_defaults_parameter(self): + fmts = ['%(custom)s %(message)s', '{custom} {message}', '$custom $message'] + styles = ['%', '{', '$'] + for fmt, style in zip(fmts, styles): + f = logging.Formatter(fmt, style=style, defaults={'custom': 'Default'}) + r = self.get_record() + self.assertEqual(f.format(r), 'Default Message with 2 placeholders') + r = self.get_record("custom") + self.assertEqual(f.format(r), '1234 Message with 2 placeholders') + + # Without default + f = logging.Formatter(fmt, style=style) + r = self.get_record() + self.assertRaises(ValueError, f.format, r) + + # Non-existing default is ignored + f = logging.Formatter(fmt, style=style, defaults={'Non-existing': 'Default'}) + r = self.get_record("custom") + self.assertEqual(f.format(r), '1234 Message with 2 placeholders') + def test_invalid_style(self): self.assertRaises(ValueError, logging.Formatter, None, None, 'x') diff --git a/Misc/NEWS.d/next/Library/2020-06-06-02-42-26.bpo-40884.n7fOwS.rst b/Misc/NEWS.d/next/Library/2020-06-06-02-42-26.bpo-40884.n7fOwS.rst new file mode 100644 index 0000000000000..64990e8023fba --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-06-02-42-26.bpo-40884.n7fOwS.rst @@ -0,0 +1,3 @@ +Added a `defaults` parameter to :class:`logging.Formatter`, to allow +specifying default values for custom fields. Patch by Asaf Alon and Bar +Harel. From webhook-mailer at python.org Thu Jun 18 12:56:54 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 18 Jun 2020 16:56:54 -0000 Subject: [Python-checkins] bpo-38377: Add support.skip_if_broken_multiprocessing_synchronize() (GH-20944) (GH-20962) (GH-20966) Message-ID: https://github.com/python/cpython/commit/e8056180a13b6755e4e3e5505b7bf03f79da29fb commit: e8056180a13b6755e4e3e5505b7bf03f79da29fb branch: 3.8 author: Victor Stinner committer: GitHub date: 2020-06-18T18:56:43+02:00 summary: bpo-38377: Add support.skip_if_broken_multiprocessing_synchronize() (GH-20944) (GH-20962) (GH-20966) On Linux, skip tests using multiprocessing if the current user cannot create a file in /dev/shm/ directory. Add the skip_if_broken_multiprocessing_synchronize() function to the test.support module. 
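For illustration only (not part of this commit), a minimal sketch of how a test module is expected to call the new helper before importing anything that needs multiprocessing locks; the test body below is invented for the example:

    from test import support

    # Skip the whole test module early when multiprocessing locks cannot be
    # created (for example, /dev/shm is not writable for the current user).
    support.skip_if_broken_multiprocessing_synchronize()

    import multiprocessing
    import unittest


    class PoolSmokeTest(unittest.TestCase):
        def test_pool_map(self):
            # Creating a Pool relies on working multiprocessing.synchronize locks.
            with multiprocessing.Pool(2) as pool:
                self.assertEqual(pool.map(abs, [-1, 2]), [1, 2])


    if __name__ == "__main__":
        unittest.main()
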
(cherry picked from commit ddbeb2f3e02a510c5784ffd74c5e09e8c70b5881) (cherry picked from commit b1e736113484c99acb57e4acb417b91a9e58e7ff) files: A Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst M Lib/test/_test_multiprocessing.py M Lib/test/support/__init__.py M Lib/test/test_asyncio/test_events.py M Lib/test/test_concurrent_futures.py M Lib/test/test_logging.py M Lib/test/test_multiprocessing_main_handling.py M Lib/test/test_venv.py diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index d5cccac16f451..87f5044148fbe 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -31,7 +31,7 @@ # Skip tests if _multiprocessing wasn't built. _multiprocessing = test.support.import_module('_multiprocessing') # Skip tests if sem_open implementation is broken. -test.support.import_module('multiprocessing.synchronize') +support.skip_if_broken_multiprocessing_synchronize() import threading import multiprocessing.connection diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 0906e7adbae9b..b75dbd214fe36 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -3377,3 +3377,26 @@ def save_restore_warnings_filters(): yield finally: warnings.filters[:] = old_filters + + +def skip_if_broken_multiprocessing_synchronize(): + """ + Skip tests if the multiprocessing.synchronize module is missing, if there + is no available semaphore implementation, or if creating a lock raises an + OSError. + """ + + # Skip tests if the _multiprocessing extension is missing. + import_module('_multiprocessing') + + # Skip tests if there is no available semaphore implementation: + # multiprocessing.synchronize requires _multiprocessing.SemLock. + synchronize = import_module('multiprocessing.synchronize') + + try: + # bpo-38377: On Linux, creating a semaphore is the current user + # does not have the permission to create a file in /dev/shm. + # Create a semaphore to check permissions. + synchronize.Lock(ctx=None) + except OSError as exc: + raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") diff --git a/Lib/test/test_asyncio/test_events.py b/Lib/test/test_asyncio/test_events.py index 37f1cb7e53a8d..85838f17678af 100644 --- a/Lib/test/test_asyncio/test_events.py +++ b/Lib/test/test_asyncio/test_events.py @@ -2644,10 +2644,10 @@ def tearDown(self): if sys.platform != 'win32': def test_get_event_loop_new_process(self): - # Issue bpo-32126: The multiprocessing module used by + # bpo-32126: The multiprocessing module used by # ProcessPoolExecutor is not functional when the # multiprocessing.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() async def main(): pool = concurrent.futures.ProcessPoolExecutor() diff --git a/Lib/test/test_concurrent_futures.py b/Lib/test/test_concurrent_futures.py index ac722981659d3..a5a746eb3898f 100644 --- a/Lib/test/test_concurrent_futures.py +++ b/Lib/test/test_concurrent_futures.py @@ -3,7 +3,7 @@ # Skip tests if _multiprocessing wasn't built. test.support.import_module('_multiprocessing') # Skip tests if sem_open implementation is broken. 
-test.support.import_module('multiprocessing.synchronize') +test.support.skip_if_broken_multiprocessing_synchronize() from test.support.script_helper import assert_python_ok diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 90bf2a4d3ac06..09b273bf1fb2d 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -3621,9 +3621,9 @@ def test_handle_called_with_queue_queue(self, mock_handle): @patch.object(logging.handlers.QueueListener, 'handle') def test_handle_called_with_mp_queue(self, mock_handle): - # Issue 28668: The multiprocessing (mp) module is not functional + # bpo-28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() for i in range(self.repeat): log_queue = multiprocessing.Queue() self.setup_and_log(log_queue, '%s_%s' % (self.id(), i)) @@ -3647,9 +3647,9 @@ def test_no_messages_in_queue_after_stop(self): indicates that messages were not registered on the queue until _after_ the QueueListener stopped. """ - # Issue 28668: The multiprocessing (mp) module is not functional + # bpo-28668: The multiprocessing (mp) module is not functional # when the mp.synchronize module cannot be imported. - support.import_module('multiprocessing.synchronize') + support.skip_if_broken_multiprocessing_synchronize() for i in range(self.repeat): queue = multiprocessing.Queue() self.setup_and_log(queue, '%s_%s' %(self.id(), i)) diff --git a/Lib/test/test_multiprocessing_main_handling.py b/Lib/test/test_multiprocessing_main_handling.py index b6abfcc7e283d..be1ff10e03a55 100644 --- a/Lib/test/test_multiprocessing_main_handling.py +++ b/Lib/test/test_multiprocessing_main_handling.py @@ -23,7 +23,7 @@ AVAILABLE_START_METHODS = set(multiprocessing.get_all_start_methods()) # Issue #22332: Skip tests if sem_open implementation is broken. -support.import_module('multiprocessing.synchronize') +support.skip_if_broken_multiprocessing_synchronize() verbose = support.verbose diff --git a/Lib/test/test_venv.py b/Lib/test/test_venv.py index 7e05138a80dc8..28743f03ae203 100644 --- a/Lib/test/test_venv.py +++ b/Lib/test/test_venv.py @@ -16,7 +16,8 @@ import tempfile from test.support import (captured_stdout, captured_stderr, requires_zlib, can_symlink, EnvironmentVarGuard, rmtree, - import_module) + import_module, + skip_if_broken_multiprocessing_synchronize) import threading import unittest import venv @@ -324,10 +325,11 @@ def test_multiprocessing(self): """ Test that the multiprocessing is able to spawn. """ - # Issue bpo-36342: Instanciation of a Pool object imports the + # bpo-36342: Instantiation of a Pool object imports the # multiprocessing.synchronize module. Skip the test if this module # cannot be imported. - import_module('multiprocessing.synchronize') + skip_if_broken_multiprocessing_synchronize() + rmtree(self.env_dir) self.run_with_capture(venv.create, self.env_dir) envpy = os.path.join(os.path.realpath(self.env_dir), diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst b/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst new file mode 100644 index 0000000000000..11a30761d36c9 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst @@ -0,0 +1,4 @@ +On Linux, skip tests using multiprocessing if the current user cannot create +a file in ``/dev/shm/`` directory. 
Add the +:func:`~test.support.skip_if_broken_multiprocessing_synchronize` function to +the :mod:`test.support` module. From webhook-mailer at python.org Thu Jun 18 15:08:40 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 18 Jun 2020 19:08:40 -0000 Subject: [Python-checkins] bpo-38144: Add the root_dir and dir_fd parameters in glob.glob(). (GH-16075) Message-ID: https://github.com/python/cpython/commit/8a64ceaf9856e7570cad6f5d628cce789834e019 commit: 8a64ceaf9856e7570cad6f5d628cce789834e019 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-18T22:08:27+03:00 summary: bpo-38144: Add the root_dir and dir_fd parameters in glob.glob(). (GH-16075) files: A Misc/NEWS.d/next/Library/2019-09-12-21-34-03.bpo-38144.8uQCdd.rst M Doc/library/glob.rst M Doc/whatsnew/3.10.rst M Lib/glob.py M Lib/test/test_glob.py diff --git a/Doc/library/glob.rst b/Doc/library/glob.rst index 92a8c4d1eb871..280e9f0826602 100644 --- a/Doc/library/glob.rst +++ b/Doc/library/glob.rst @@ -36,7 +36,7 @@ For example, ``'[?]'`` matches the character ``'?'``. The :mod:`pathlib` module offers high-level path objects. -.. function:: glob(pathname, *, recursive=False) +.. function:: glob(pathname, *, root_dir=None, dir_fd=None, recursive=False) Return a possibly-empty list of path names that match *pathname*, which must be a string containing a path specification. *pathname* can be either absolute @@ -45,6 +45,15 @@ For example, ``'[?]'`` matches the character ``'?'``. symlinks are included in the results (as in the shell). Whether or not the results are sorted depends on the file system. + If *root_dir* is not ``None``, it should be a :term:`path-like object` + specifying the root directory for searching. It has the same effect on + :func:`glob` as changing the current directory before calling it. If + *pathname* is relative, the result will contain paths relative to + *root_dir*. + + This function can support :ref:`paths relative to directory descriptors + ` with the *dir_fd* parameter. + .. index:: single: **; in glob-style wildcards @@ -62,8 +71,11 @@ For example, ``'[?]'`` matches the character ``'?'``. .. versionchanged:: 3.5 Support for recursive globs using "``**``". + .. versionchanged:: 3.10 + Added the *root_dir* and *dir_fd* parameters. + -.. function:: iglob(pathname, *, recursive=False) +.. function:: iglob(pathname, *, root_dir=None, dir_fd=None, recursive=False) Return an :term:`iterator` which yields the same values as :func:`glob` without actually storing them all simultaneously. diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index f956ddd45dca9..566827bf90ff3 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -100,6 +100,14 @@ New Modules Improved Modules ================ +glob +---- + +Added the *root_dir* and *dir_fd* parameters in :func:`~glob.glob` and +:func:`~glob.iglob` which allow to specify the root directory for searching. +(Contributed by Serhiy Storchaka in :issue:`38144`.) + + Optimizations ============= diff --git a/Lib/glob.py b/Lib/glob.py index 0dd2f8be66109..3c449a90dffef 100644 --- a/Lib/glob.py +++ b/Lib/glob.py @@ -3,11 +3,13 @@ import os import re import fnmatch +import itertools +import stat import sys __all__ = ["glob", "iglob", "escape"] -def glob(pathname, *, recursive=False): +def glob(pathname, *, root_dir=None, dir_fd=None, recursive=False): """Return a list of paths matching a pathname pattern. 
The pattern may contain simple shell-style wildcards a la @@ -18,9 +20,9 @@ def glob(pathname, *, recursive=False): If recursive is true, the pattern '**' will match any files and zero or more directories and subdirectories. """ - return list(iglob(pathname, recursive=recursive)) + return list(iglob(pathname, root_dir=root_dir, dir_fd=dir_fd, recursive=recursive)) -def iglob(pathname, *, recursive=False): +def iglob(pathname, *, root_dir=None, dir_fd=None, recursive=False): """Return an iterator which yields the paths matching a pathname pattern. The pattern may contain simple shell-style wildcards a la @@ -31,36 +33,43 @@ def iglob(pathname, *, recursive=False): If recursive is true, the pattern '**' will match any files and zero or more directories and subdirectories. """ - sys.audit("glob.glob", pathname, recursive) - it = _iglob(pathname, recursive, False) - if recursive and _isrecursive(pathname): - s = next(it) # skip empty string - assert not s + if root_dir is not None: + root_dir = os.fspath(root_dir) + else: + root_dir = pathname[:0] + it = _iglob(pathname, root_dir, dir_fd, recursive, False) + if not pathname or recursive and _isrecursive(pathname[:2]): + try: + s = next(it) # skip empty string + if s: + it = itertools.chain((s,), it) + except StopIteration: + pass return it -def _iglob(pathname, recursive, dironly): +def _iglob(pathname, root_dir, dir_fd, recursive, dironly): dirname, basename = os.path.split(pathname) if not has_magic(pathname): assert not dironly if basename: - if os.path.lexists(pathname): + if _lexists(_join(root_dir, pathname), dir_fd): yield pathname else: # Patterns ending with a slash should match only directories - if os.path.isdir(dirname): + if _isdir(_join(root_dir, dirname), dir_fd): yield pathname return if not dirname: if recursive and _isrecursive(basename): - yield from _glob2(dirname, basename, dironly) + yield from _glob2(root_dir, basename, dir_fd, dironly) else: - yield from _glob1(dirname, basename, dironly) + yield from _glob1(root_dir, basename, dir_fd, dironly) return # `os.path.split()` returns the argument itself as a dirname if it is a # drive or UNC path. Prevent an infinite recursion if a drive or UNC path # contains magic characters (i.e. r'\\?\C:'). if dirname != pathname and has_magic(dirname): - dirs = _iglob(dirname, recursive, True) + dirs = _iglob(dirname, root_dir, dir_fd, recursive, True) else: dirs = [dirname] if has_magic(basename): @@ -71,76 +80,121 @@ def _iglob(pathname, recursive, dironly): else: glob_in_dir = _glob0 for dirname in dirs: - for name in glob_in_dir(dirname, basename, dironly): + for name in glob_in_dir(_join(root_dir, dirname), basename, dir_fd, dironly): yield os.path.join(dirname, name) # These 2 helper functions non-recursively glob inside a literal directory. # They return a list of basenames. _glob1 accepts a pattern while _glob0 # takes a literal basename (so it only has to check for its existence). -def _glob1(dirname, pattern, dironly): - names = list(_iterdir(dirname, dironly)) +def _glob1(dirname, pattern, dir_fd, dironly): + names = list(_iterdir(dirname, dir_fd, dironly)) if not _ishidden(pattern): names = (x for x in names if not _ishidden(x)) return fnmatch.filter(names, pattern) -def _glob0(dirname, basename, dironly): - if not basename: - # `os.path.split()` returns an empty basename for paths ending with a - # directory separator. 'q*x/' should match only directories. 
- if os.path.isdir(dirname): +def _glob0(dirname, basename, dir_fd, dironly): + if basename: + if _lexists(_join(dirname, basename), dir_fd): return [basename] else: - if os.path.lexists(os.path.join(dirname, basename)): + # `os.path.split()` returns an empty basename for paths ending with a + # directory separator. 'q*x/' should match only directories. + if _isdir(dirname, dir_fd): return [basename] return [] # Following functions are not public but can be used by third-party code. def glob0(dirname, pattern): - return _glob0(dirname, pattern, False) + return _glob0(dirname, pattern, None, False) def glob1(dirname, pattern): - return _glob1(dirname, pattern, False) + return _glob1(dirname, pattern, None, False) # This helper function recursively yields relative pathnames inside a literal # directory. -def _glob2(dirname, pattern, dironly): +def _glob2(dirname, pattern, dir_fd, dironly): assert _isrecursive(pattern) yield pattern[:0] - yield from _rlistdir(dirname, dironly) + yield from _rlistdir(dirname, dir_fd, dironly) # If dironly is false, yields all file names inside a directory. # If dironly is true, yields only directory names. -def _iterdir(dirname, dironly): - if not dirname: - if isinstance(dirname, bytes): - dirname = bytes(os.curdir, 'ASCII') - else: - dirname = os.curdir +def _iterdir(dirname, dir_fd, dironly): try: - with os.scandir(dirname) as it: - for entry in it: - try: - if not dironly or entry.is_dir(): - yield entry.name - except OSError: - pass + fd = None + fsencode = None + if dir_fd is not None: + if dirname: + fd = arg = os.open(dirname, _dir_open_flags, dir_fd=dir_fd) + else: + arg = dir_fd + if isinstance(dirname, bytes): + fsencode = os.fsencode + elif dirname: + arg = dirname + elif isinstance(dirname, bytes): + arg = bytes(os.curdir, 'ASCII') + else: + arg = os.curdir + try: + with os.scandir(arg) as it: + for entry in it: + try: + if not dironly or entry.is_dir(): + if fsencode is not None: + yield fsencode(entry.name) + else: + yield entry.name + except OSError: + pass + finally: + if fd is not None: + os.close(fd) except OSError: return # Recursively yields relative pathnames inside a literal directory. 
-def _rlistdir(dirname, dironly): - names = list(_iterdir(dirname, dironly)) +def _rlistdir(dirname, dir_fd, dironly): + names = list(_iterdir(dirname, dir_fd, dironly)) for x in names: if not _ishidden(x): yield x - path = os.path.join(dirname, x) if dirname else x - for y in _rlistdir(path, dironly): - yield os.path.join(x, y) + path = _join(dirname, x) if dirname else x + for y in _rlistdir(path, dir_fd, dironly): + yield _join(x, y) +def _lexists(pathname, dir_fd): + # Same as os.path.lexists(), but with dir_fd + if dir_fd is None: + return os.path.lexists(pathname) + try: + os.lstat(pathname, dir_fd=dir_fd) + except (OSError, ValueError): + return False + else: + return True + +def _isdir(pathname, dir_fd): + # Same as os.path.isdir(), but with dir_fd + if dir_fd is None: + return os.path.isdir(pathname) + try: + st = os.stat(pathname, dir_fd=dir_fd) + except (OSError, ValueError): + return False + else: + return stat.S_ISDIR(st.st_mode) + +def _join(dirname, basename): + # It is common if dirname or basename is empty + if not dirname or not basename: + return dirname or basename + return os.path.join(dirname, basename) + magic_check = re.compile('([*?[])') magic_check_bytes = re.compile(b'([*?[])') @@ -171,3 +225,6 @@ def escape(pathname): else: pathname = magic_check.sub(r'[\1]', pathname) return drive + pathname + + +_dir_open_flags = os.O_RDONLY | getattr(os, 'O_DIRECTORY', 0) diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py index cba8c7c60e217..f8158523a0469 100644 --- a/Lib/test/test_glob.py +++ b/Lib/test/test_glob.py @@ -9,6 +9,7 @@ class GlobTests(unittest.TestCase): + dir_fd = None def norm(self, *parts): return os.path.normpath(os.path.join(self.tempdir, *parts)) @@ -38,8 +39,14 @@ def setUp(self): os.symlink(self.norm('broken'), self.norm('sym1')) os.symlink('broken', self.norm('sym2')) os.symlink(os.path.join('a', 'bcd'), self.norm('sym3')) + if {os.open, os.stat} <= os.supports_dir_fd and os.scandir in os.supports_fd: + self.dir_fd = os.open(self.tempdir, os.O_RDONLY | os.O_DIRECTORY) + else: + self.dir_fd = None def tearDown(self): + if self.dir_fd is not None: + os.close(self.dir_fd) shutil.rmtree(self.tempdir) def glob(self, *parts, **kwargs): @@ -53,6 +60,41 @@ def glob(self, *parts, **kwargs): bres = [os.fsencode(x) for x in res] self.assertCountEqual(glob.glob(os.fsencode(p), **kwargs), bres) self.assertCountEqual(glob.iglob(os.fsencode(p), **kwargs), bres) + + with change_cwd(self.tempdir): + res2 = glob.glob(pattern, **kwargs) + for x in res2: + self.assertFalse(os.path.isabs(x), x) + if pattern == '**' or pattern == '**' + os.sep: + expected = res[1:] + else: + expected = res + self.assertCountEqual([os.path.join(self.tempdir, x) for x in res2], + expected) + self.assertCountEqual(glob.iglob(pattern, **kwargs), res2) + bpattern = os.fsencode(pattern) + bres2 = [os.fsencode(x) for x in res2] + self.assertCountEqual(glob.glob(bpattern, **kwargs), bres2) + self.assertCountEqual(glob.iglob(bpattern, **kwargs), bres2) + + self.assertCountEqual(glob.glob(pattern, root_dir=self.tempdir, **kwargs), res2) + self.assertCountEqual(glob.iglob(pattern, root_dir=self.tempdir, **kwargs), res2) + btempdir = os.fsencode(self.tempdir) + self.assertCountEqual( + glob.glob(bpattern, root_dir=btempdir, **kwargs), bres2) + self.assertCountEqual( + glob.iglob(bpattern, root_dir=btempdir, **kwargs), bres2) + + if self.dir_fd is not None: + self.assertCountEqual( + glob.glob(pattern, dir_fd=self.dir_fd, **kwargs), res2) + self.assertCountEqual( + glob.iglob(pattern, 
dir_fd=self.dir_fd, **kwargs), res2) + self.assertCountEqual( + glob.glob(bpattern, dir_fd=self.dir_fd, **kwargs), bres2) + self.assertCountEqual( + glob.iglob(bpattern, dir_fd=self.dir_fd, **kwargs), bres2) + return res def assertSequencesEqual_noorder(self, l1, l2): @@ -78,6 +120,14 @@ def test_glob_literal(self): res = glob.glob(os.path.join(os.fsencode(os.curdir), b'*')) self.assertEqual({type(r) for r in res}, {bytes}) + def test_glob_empty_pattern(self): + self.assertEqual(glob.glob(''), []) + self.assertEqual(glob.glob(b''), []) + self.assertEqual(glob.glob('', root_dir=self.tempdir), []) + self.assertEqual(glob.glob(b'', root_dir=os.fsencode(self.tempdir)), []) + self.assertEqual(glob.glob('', dir_fd=self.dir_fd), []) + self.assertEqual(glob.glob(b'', dir_fd=self.dir_fd), []) + def test_glob_one_directory(self): eq = self.assertSequencesEqual_noorder eq(self.glob('a*'), map(self.norm, ['a', 'aab', 'aaa'])) diff --git a/Misc/NEWS.d/next/Library/2019-09-12-21-34-03.bpo-38144.8uQCdd.rst b/Misc/NEWS.d/next/Library/2019-09-12-21-34-03.bpo-38144.8uQCdd.rst new file mode 100644 index 0000000000000..2c335bf29cfb3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2019-09-12-21-34-03.bpo-38144.8uQCdd.rst @@ -0,0 +1 @@ +Added the *root_dir* and *dir_fd* parameters in :func:`glob.glob`. From webhook-mailer at python.org Thu Jun 18 18:23:48 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Thu, 18 Jun 2020 22:23:48 -0000 Subject: [Python-checkins] Update CODEOWNERS to account for the new parser location (GH-20971) Message-ID: https://github.com/python/cpython/commit/d906f0ec1a5f4ec29a4de74240acf43139886514 commit: d906f0ec1a5f4ec29a4de74240acf43139886514 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-18T23:23:40+01:00 summary: Update CODEOWNERS to account for the new parser location (GH-20971) files: M .github/CODEOWNERS diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 4d80698eff39c..c33bf1ee4d4e2 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -72,10 +72,10 @@ Include/pytime.h @pganssle @abalkin /Modules/gcmodule.c @pablogsal /Doc/library/gc.rst @pablogsal -# Parser/Pgen -/Parser/pgen/ @pablogsal -/Parser/pegen/ @pablogsal +# Parser +/Parser/ @pablogsal /Tools/peg_generator/ @pablogsal +/Lib/test/test_peg_generator/ @pablogsal # SQLite 3 **/*sqlite* @berkerpeksag From webhook-mailer at python.org Thu Jun 18 19:10:52 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 18 Jun 2020 23:10:52 -0000 Subject: [Python-checkins] bpo-40334: Produce better error messages on invalid targets (GH-20106) Message-ID: https://github.com/python/cpython/commit/01ece63d42b830df106948db0aefa6c1ba24416a commit: 01ece63d42b830df106948db0aefa6c1ba24416a branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-19T00:10:43+01:00 summary: bpo-40334: Produce better error messages on invalid targets (GH-20106) The following error messages get produced: - `cannot delete ...` for invalid `del` targets - `... is an illegal 'for' target` for invalid targets in for statements - `... is an illegal 'with' target` for invalid targets in with statements Additionally, a few `cut`s were added in various places before the invocation of the `invalid_*` rule, in order to speed things up. 
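For concrete examples, see the doctests added to Lib/test/test_syntax.py below; for instance:

    >>> del f()
    Traceback (most recent call last):
    SyntaxError: cannot delete function call

    >>> for a() in b: pass
    Traceback (most recent call last):
    SyntaxError: cannot assign to function call

    >>> with a as (*b, c, d+1): pass
    Traceback (most recent call last):
    SyntaxError: cannot assign to operator
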
Co-authored-by: Pablo Galindo files: M Grammar/python.gram M Lib/test/test_exceptions.py M Lib/test/test_syntax.py M Parser/parser.c M Parser/pegen.c M Parser/pegen.h diff --git a/Grammar/python.gram b/Grammar/python.gram index 0dfbeb9598d70..e4abca9388eb0 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -93,7 +93,7 @@ assignment[stmt_ty]: CHECK_VERSION(6, "Variable annotations syntax is", _Py_AnnAssign(a, b, c, 0, EXTRA)) } | a=(z=star_targets '=' { z })+ b=(yield_expr | star_expressions) !'=' tc=[TYPE_COMMENT] { _Py_Assign(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | a=single_target b=augassign c=(yield_expr | star_expressions) { + | a=single_target b=augassign ~ c=(yield_expr | star_expressions) { _Py_AugAssign(a, b->kind, c, EXTRA) } | invalid_assignment @@ -121,7 +121,9 @@ yield_stmt[stmt_ty]: y=yield_expr { _Py_Expr(y, EXTRA) } assert_stmt[stmt_ty]: 'assert' a=expression b=[',' z=expression { z }] { _Py_Assert(a, b, EXTRA) } -del_stmt[stmt_ty]: 'del' a=del_targets { _Py_Delete(a, EXTRA) } +del_stmt[stmt_ty]: + | 'del' a=del_targets &(';' | NEWLINE) { _Py_Delete(a, EXTRA) } + | invalid_del_stmt import_stmt[stmt_ty]: import_name | import_from import_name[stmt_ty]: 'import' a=dotted_as_names { _Py_Import(a, EXTRA) } @@ -164,10 +166,11 @@ while_stmt[stmt_ty]: | 'while' a=named_expression ':' b=block c=[else_block] { _Py_While(a, b, c, EXTRA) } for_stmt[stmt_ty]: - | 'for' t=star_targets 'in' ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { + | 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { _Py_For(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA) } - | ASYNC 'for' t=star_targets 'in' ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { + | ASYNC 'for' t=star_targets 'in' ~ ex=star_expressions ':' tc=[TYPE_COMMENT] b=block el=[else_block] { CHECK_VERSION(5, "Async for loops are", _Py_AsyncFor(t, ex, b, el, NEW_TYPE_COMMENT(p, tc), EXTRA)) } + | invalid_for_target with_stmt[stmt_ty]: | 'with' '(' a=','.with_item+ ','? 
')' ':' b=block { @@ -179,7 +182,9 @@ with_stmt[stmt_ty]: | ASYNC 'with' a=','.with_item+ ':' tc=[TYPE_COMMENT] b=block { CHECK_VERSION(5, "Async with statements are", _Py_AsyncWith(a, b, NEW_TYPE_COMMENT(p, tc), EXTRA)) } with_item[withitem_ty]: - | e=expression o=['as' t=target { t }] { _Py_withitem(e, o, p->arena) } + | e=expression 'as' t=target &(',' | ')' | ':') { _Py_withitem(e, t, p->arena) } + | invalid_with_item + | e=expression { _Py_withitem(e, NULL, p->arena) } try_stmt[stmt_ty]: | 'try' ':' b=block f=finally_block { _Py_Try(b, NULL, NULL, f, EXTRA) } @@ -311,7 +316,7 @@ star_named_expression[expr_ty]: | '*' a=bitwise_or { _Py_Starred(a, Load, EXTRA) } | named_expression named_expression[expr_ty]: - | a=NAME ':=' b=expression { _Py_NamedExpr(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, EXTRA) } + | a=NAME ':=' ~ b=expression { _Py_NamedExpr(CHECK(_PyPegen_set_expr_context(p, a, Store)), b, EXTRA) } | expression !':=' | invalid_named_expression @@ -487,18 +492,20 @@ strings[expr_ty] (memo): a=STRING+ { _PyPegen_concatenate_strings(p, a) } list[expr_ty]: | '[' a=[star_named_expressions] ']' { _Py_List(a, Load, EXTRA) } listcomp[expr_ty]: - | '[' a=named_expression b=for_if_clauses ']' { _Py_ListComp(a, b, EXTRA) } + | '[' a=named_expression ~ b=for_if_clauses ']' { _Py_ListComp(a, b, EXTRA) } | invalid_comprehension tuple[expr_ty]: | '(' a=[y=star_named_expression ',' z=[star_named_expressions] { _PyPegen_seq_insert_in_front(p, y, z) } ] ')' { _Py_Tuple(a, Load, EXTRA) } -group[expr_ty]: '(' a=(yield_expr | named_expression) ')' { a } +group[expr_ty]: + | '(' a=(yield_expr | named_expression) ')' { a } + | invalid_group genexp[expr_ty]: - | '(' a=expression b=for_if_clauses ')' { _Py_GeneratorExp(a, b, EXTRA) } + | '(' a=expression ~ b=for_if_clauses ')' { _Py_GeneratorExp(a, b, EXTRA) } | invalid_comprehension set[expr_ty]: '{' a=expressions_list '}' { _Py_Set(a, EXTRA) } setcomp[expr_ty]: - | '{' a=expression b=for_if_clauses '}' { _Py_SetComp(a, b, EXTRA) } + | '{' a=expression ~ b=for_if_clauses '}' { _Py_SetComp(a, b, EXTRA) } | invalid_comprehension dict[expr_ty]: | '{' a=[double_starred_kvpairs] '}' { @@ -514,10 +521,11 @@ kvpair[KeyValuePair*]: a=expression ':' b=expression { _PyPegen_key_value_pair(p for_if_clauses[asdl_seq*]: | for_if_clause+ for_if_clause[comprehension_ty]: - | ASYNC 'for' a=star_targets 'in' b=disjunction c=('if' z=disjunction { z })* { + | ASYNC 'for' a=star_targets 'in' ~ b=disjunction c=('if' z=disjunction { z })* { CHECK_VERSION(6, "Async comprehensions are", _Py_comprehension(a, b, c, 1, p->arena)) } - | 'for' a=star_targets 'in' b=disjunction c=('if' z=disjunction { z })* { + | 'for' a=star_targets 'in' ~ b=disjunction c=('if' z=disjunction { z })* { _Py_comprehension(a, b, c, 0, p->arena) } + | invalid_for_target yield_expr[expr_ty]: | 'yield' 'from' a=expression { _Py_YieldFrom(a, EXTRA) } @@ -587,19 +595,15 @@ single_subscript_attribute_target[expr_ty]: | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Store, EXTRA) } del_targets[asdl_seq*]: a=','.del_target+ [','] { a } -# The lookaheads to del_target_end ensure that we don't match expressions where a prefix of the -# expression matches our rule, thereby letting these cases fall through to invalid_del_target. del_target[expr_ty] (memo): - | a=t_primary '.' b=NAME &del_target_end { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } - | a=t_primary '[' b=slices ']' &del_target_end { _Py_Subscript(a, b, Del, EXTRA) } + | a=t_primary '.' 
b=NAME !t_lookahead { _Py_Attribute(a, b->v.Name.id, Del, EXTRA) } + | a=t_primary '[' b=slices ']' !t_lookahead { _Py_Subscript(a, b, Del, EXTRA) } | del_t_atom del_t_atom[expr_ty]: - | a=NAME &del_target_end { _PyPegen_set_expr_context(p, a, Del) } + | a=NAME { _PyPegen_set_expr_context(p, a, Del) } | '(' a=del_target ')' { _PyPegen_set_expr_context(p, a, Del) } | '(' a=[del_targets] ')' { _Py_Tuple(a, Del, EXTRA) } | '[' a=[del_targets] ']' { _Py_List(a, Del, EXTRA) } - | invalid_del_target -del_target_end: ')' | ']' | ',' | ';' | NEWLINE targets[asdl_seq*]: a=','.target+ [','] { a } target[expr_ty] (memo): @@ -650,8 +654,8 @@ invalid_assignment: RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } | (star_targets '=')* a=star_expressions '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - _PyPegen_get_invalid_target(a), - "cannot assign to %s", _PyPegen_get_expr_name(_PyPegen_get_invalid_target(a))) } + GET_INVALID_TARGET(a), + "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_TARGET(a))) } | (star_targets '=')* a=yield_expr '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "assignment to yield expression not possible") } | a=star_expressions augassign (yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( @@ -659,7 +663,14 @@ invalid_assignment: "'%s' is an illegal expression for augmented assignment", _PyPegen_get_expr_name(a) )} - +invalid_del_stmt: + | 'del' a=star_expressions { + GET_INVALID_DEL_TARGET(a) != NULL ? + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + GET_INVALID_DEL_TARGET(a), + "cannot delete %s", _PyPegen_get_expr_name(GET_INVALID_DEL_TARGET(a)) + ) : + RAISE_SYNTAX_ERROR("invalid syntax") } invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: @@ -682,9 +693,25 @@ invalid_lambda_star_etc: invalid_double_type_comments: | TYPE_COMMENT NEWLINE TYPE_COMMENT NEWLINE INDENT { RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } -invalid_del_target: - | a=star_expression &del_target_end { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "cannot delete %s", _PyPegen_get_expr_name(a)) } +invalid_with_item: + | expression 'as' a=expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + GET_INVALID_TARGET(a), + "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_TARGET(a)) + ) } + +invalid_for_target: + | ASYNC? 'for' a=star_expressions { + GET_INVALID_FOR_TARGET(a) != NULL ? 
+ RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + GET_INVALID_FOR_TARGET(a), + "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_FOR_TARGET(a)) + ) : + RAISE_SYNTAX_ERROR("invalid syntax") } + +invalid_group: + | '(' a=starred_expression ')' { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "can't use starred expression here") } invalid_import_from_targets: | import_from_as_names ',' { RAISE_SYNTAX_ERROR("trailing comma not allowed without surrounding parentheses") } diff --git a/Lib/test/test_exceptions.py b/Lib/test/test_exceptions.py index feae31b142bf3..a67e69bfff728 100644 --- a/Lib/test/test_exceptions.py +++ b/Lib/test/test_exceptions.py @@ -251,9 +251,9 @@ def baz(): check('def f():\n x, y: int', 2, 3) check('[*x for x in xs]', 1, 2) check('foo(x for x in range(10), 100)', 1, 5) + check('for 1 in []: pass', 1, 5) check('(yield i) = 2', 1, 2) check('def f(*):\n pass', 1, 8) - check('for 1 in []: pass', 1, 7) @cpython_only def testSettingException(self): diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 6ea9a55e6f380..9bb3d9ee44448 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -164,6 +164,65 @@ Traceback (most recent call last): SyntaxError: 'list' is an illegal expression for augmented assignment +Invalid targets in `for` loops and `with` statements should also +produce a specialized error message + +>>> for a() in b: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> for (a, b()) in b: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> for [a, b()] in b: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> for (*a, b, c+1) in b: pass +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> for (x, *(y, z.d())) in b: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> for a, b() in c: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> for a, b, (c + 1, d()): pass +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> for i < (): pass +Traceback (most recent call last): +SyntaxError: invalid syntax + +>>> with a as b(): pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> with a as (b, c()): pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> with a as [b, c()]: pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> with a as (*b, c, d+1): pass +Traceback (most recent call last): +SyntaxError: cannot assign to operator + +>>> with a as (x, *(y, z.d())): pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + +>>> with a as b, c as d(): pass +Traceback (most recent call last): +SyntaxError: cannot assign to function call + >>> p = p = Traceback (most recent call last): SyntaxError: invalid syntax @@ -739,7 +798,7 @@ def test_assign_del(self): self._check_error("del (1, 2)", "delete literal") self._check_error("del None", "delete None") self._check_error("del *x", "delete starred") - self._check_error("del (*x)", "delete starred") + self._check_error("del (*x)", "use starred expression") self._check_error("del (*x,)", "delete starred") self._check_error("del [*x,]", "delete starred") self._check_error("del f()", "delete function call") diff --git a/Parser/parser.c b/Parser/parser.c index d28e6c83aadb0..1531c99f83891 100644 --- a/Parser/parser.c +++ 
b/Parser/parser.c @@ -14,8 +14,8 @@ static KeywordToken *reserved_keywords[] = { (KeywordToken[]) { {"if", 510}, {"in", 518}, - {"is", 526}, - {"as", 530}, + {"as", 520}, + {"is", 527}, {"or", 531}, {NULL, -1}, }, @@ -23,8 +23,8 @@ static KeywordToken *reserved_keywords[] = { {"del", 503}, {"try", 511}, {"for", 517}, - {"def", 522}, - {"not", 525}, + {"def", 523}, + {"not", 526}, {"and", 532}, {NULL, -1}, }, @@ -34,8 +34,8 @@ static KeywordToken *reserved_keywords[] = { {"elif", 515}, {"else", 516}, {"with", 519}, - {"True", 527}, - {"None", 529}, + {"True", 528}, + {"None", 530}, {NULL, -1}, }, (KeywordToken[]) { @@ -43,8 +43,8 @@ static KeywordToken *reserved_keywords[] = { {"yield", 504}, {"break", 506}, {"while", 512}, - {"class", 523}, - {"False", 528}, + {"class", 524}, + {"False", 529}, {NULL, -1}, }, (KeywordToken[]) { @@ -52,12 +52,12 @@ static KeywordToken *reserved_keywords[] = { {"assert", 505}, {"global", 508}, {"import", 513}, - {"except", 520}, - {"lambda", 524}, + {"except", 521}, + {"lambda", 525}, {NULL, -1}, }, (KeywordToken[]) { - {"finally", 521}, + {"finally", 522}, {NULL, -1}, }, (KeywordToken[]) { @@ -204,16 +204,16 @@ static KeywordToken *reserved_keywords[] = { #define del_targets_type 1135 #define del_target_type 1136 #define del_t_atom_type 1137 -#define del_target_end_type 1138 -#define targets_type 1139 -#define target_type 1140 -#define t_primary_type 1141 // Left-recursive -#define t_lookahead_type 1142 -#define t_atom_type 1143 -#define incorrect_arguments_type 1144 -#define invalid_kwarg_type 1145 -#define invalid_named_expression_type 1146 -#define invalid_assignment_type 1147 +#define targets_type 1138 +#define target_type 1139 +#define t_primary_type 1140 // Left-recursive +#define t_lookahead_type 1141 +#define t_atom_type 1142 +#define incorrect_arguments_type 1143 +#define invalid_kwarg_type 1144 +#define invalid_named_expression_type 1145 +#define invalid_assignment_type 1146 +#define invalid_del_stmt_type 1147 #define invalid_block_type 1148 #define invalid_comprehension_type 1149 #define invalid_dict_comprehension_type 1150 @@ -222,162 +222,165 @@ static KeywordToken *reserved_keywords[] = { #define invalid_star_etc_type 1153 #define invalid_lambda_star_etc_type 1154 #define invalid_double_type_comments_type 1155 -#define invalid_del_target_type 1156 -#define invalid_import_from_targets_type 1157 -#define _loop0_1_type 1158 -#define _loop0_2_type 1159 -#define _loop0_4_type 1160 -#define _gather_3_type 1161 -#define _loop0_6_type 1162 -#define _gather_5_type 1163 -#define _loop0_8_type 1164 -#define _gather_7_type 1165 -#define _loop0_10_type 1166 -#define _gather_9_type 1167 -#define _loop1_11_type 1168 -#define _loop0_13_type 1169 -#define _gather_12_type 1170 -#define _tmp_14_type 1171 -#define _tmp_15_type 1172 -#define _tmp_16_type 1173 -#define _tmp_17_type 1174 -#define _tmp_18_type 1175 -#define _tmp_19_type 1176 -#define _tmp_20_type 1177 -#define _tmp_21_type 1178 -#define _loop1_22_type 1179 -#define _tmp_23_type 1180 -#define _tmp_24_type 1181 -#define _loop0_26_type 1182 -#define _gather_25_type 1183 -#define _loop0_28_type 1184 -#define _gather_27_type 1185 -#define _tmp_29_type 1186 -#define _loop0_30_type 1187 -#define _loop1_31_type 1188 -#define _loop0_33_type 1189 -#define _gather_32_type 1190 -#define _tmp_34_type 1191 -#define _loop0_36_type 1192 -#define _gather_35_type 1193 -#define _tmp_37_type 1194 -#define _loop0_39_type 1195 -#define _gather_38_type 1196 -#define _loop0_41_type 1197 -#define _gather_40_type 1198 -#define 
_loop0_43_type 1199 -#define _gather_42_type 1200 -#define _loop0_45_type 1201 -#define _gather_44_type 1202 -#define _tmp_46_type 1203 -#define _loop1_47_type 1204 -#define _tmp_48_type 1205 -#define _tmp_49_type 1206 -#define _tmp_50_type 1207 -#define _tmp_51_type 1208 -#define _tmp_52_type 1209 -#define _loop0_53_type 1210 -#define _loop0_54_type 1211 -#define _loop0_55_type 1212 -#define _loop1_56_type 1213 -#define _loop0_57_type 1214 -#define _loop1_58_type 1215 -#define _loop1_59_type 1216 -#define _loop1_60_type 1217 -#define _loop0_61_type 1218 -#define _loop1_62_type 1219 -#define _loop0_63_type 1220 -#define _loop1_64_type 1221 -#define _loop0_65_type 1222 -#define _loop1_66_type 1223 -#define _loop1_67_type 1224 -#define _tmp_68_type 1225 -#define _loop0_70_type 1226 -#define _gather_69_type 1227 -#define _loop1_71_type 1228 -#define _loop0_73_type 1229 -#define _gather_72_type 1230 -#define _loop1_74_type 1231 -#define _loop0_75_type 1232 -#define _loop0_76_type 1233 -#define _loop0_77_type 1234 -#define _loop1_78_type 1235 -#define _loop0_79_type 1236 -#define _loop1_80_type 1237 -#define _loop1_81_type 1238 -#define _loop1_82_type 1239 -#define _loop0_83_type 1240 -#define _loop1_84_type 1241 -#define _loop0_85_type 1242 -#define _loop1_86_type 1243 -#define _loop0_87_type 1244 -#define _loop1_88_type 1245 -#define _loop1_89_type 1246 -#define _loop1_90_type 1247 -#define _loop1_91_type 1248 -#define _tmp_92_type 1249 -#define _loop0_94_type 1250 -#define _gather_93_type 1251 -#define _tmp_95_type 1252 -#define _tmp_96_type 1253 -#define _tmp_97_type 1254 -#define _tmp_98_type 1255 -#define _loop1_99_type 1256 -#define _tmp_100_type 1257 -#define _tmp_101_type 1258 -#define _loop0_103_type 1259 -#define _gather_102_type 1260 -#define _loop1_104_type 1261 -#define _loop0_105_type 1262 -#define _loop0_106_type 1263 -#define _tmp_107_type 1264 -#define _tmp_108_type 1265 -#define _loop0_110_type 1266 -#define _gather_109_type 1267 -#define _loop0_112_type 1268 -#define _gather_111_type 1269 -#define _loop0_114_type 1270 -#define _gather_113_type 1271 -#define _loop0_116_type 1272 -#define _gather_115_type 1273 -#define _loop0_117_type 1274 -#define _loop0_119_type 1275 -#define _gather_118_type 1276 -#define _tmp_120_type 1277 -#define _loop0_122_type 1278 -#define _gather_121_type 1279 -#define _loop0_124_type 1280 -#define _gather_123_type 1281 -#define _tmp_125_type 1282 -#define _loop0_126_type 1283 -#define _tmp_127_type 1284 -#define _loop0_128_type 1285 -#define _loop0_129_type 1286 -#define _tmp_130_type 1287 -#define _tmp_131_type 1288 -#define _loop0_132_type 1289 -#define _tmp_133_type 1290 -#define _loop0_134_type 1291 -#define _tmp_135_type 1292 -#define _tmp_136_type 1293 -#define _tmp_137_type 1294 -#define _tmp_138_type 1295 -#define _tmp_139_type 1296 -#define _tmp_140_type 1297 -#define _tmp_141_type 1298 -#define _tmp_142_type 1299 -#define _tmp_143_type 1300 -#define _tmp_144_type 1301 -#define _tmp_145_type 1302 -#define _tmp_146_type 1303 -#define _tmp_147_type 1304 -#define _tmp_148_type 1305 -#define _tmp_149_type 1306 -#define _tmp_150_type 1307 -#define _loop1_151_type 1308 -#define _loop1_152_type 1309 -#define _tmp_153_type 1310 -#define _tmp_154_type 1311 +#define invalid_with_item_type 1156 +#define invalid_for_target_type 1157 +#define invalid_group_type 1158 +#define invalid_import_from_targets_type 1159 +#define _loop0_1_type 1160 +#define _loop0_2_type 1161 +#define _loop0_4_type 1162 +#define _gather_3_type 1163 +#define _loop0_6_type 1164 
+#define _gather_5_type 1165 +#define _loop0_8_type 1166 +#define _gather_7_type 1167 +#define _loop0_10_type 1168 +#define _gather_9_type 1169 +#define _loop1_11_type 1170 +#define _loop0_13_type 1171 +#define _gather_12_type 1172 +#define _tmp_14_type 1173 +#define _tmp_15_type 1174 +#define _tmp_16_type 1175 +#define _tmp_17_type 1176 +#define _tmp_18_type 1177 +#define _tmp_19_type 1178 +#define _tmp_20_type 1179 +#define _tmp_21_type 1180 +#define _loop1_22_type 1181 +#define _tmp_23_type 1182 +#define _tmp_24_type 1183 +#define _loop0_26_type 1184 +#define _gather_25_type 1185 +#define _loop0_28_type 1186 +#define _gather_27_type 1187 +#define _tmp_29_type 1188 +#define _tmp_30_type 1189 +#define _loop0_31_type 1190 +#define _loop1_32_type 1191 +#define _loop0_34_type 1192 +#define _gather_33_type 1193 +#define _tmp_35_type 1194 +#define _loop0_37_type 1195 +#define _gather_36_type 1196 +#define _tmp_38_type 1197 +#define _loop0_40_type 1198 +#define _gather_39_type 1199 +#define _loop0_42_type 1200 +#define _gather_41_type 1201 +#define _loop0_44_type 1202 +#define _gather_43_type 1203 +#define _loop0_46_type 1204 +#define _gather_45_type 1205 +#define _tmp_47_type 1206 +#define _loop1_48_type 1207 +#define _tmp_49_type 1208 +#define _tmp_50_type 1209 +#define _tmp_51_type 1210 +#define _tmp_52_type 1211 +#define _tmp_53_type 1212 +#define _loop0_54_type 1213 +#define _loop0_55_type 1214 +#define _loop0_56_type 1215 +#define _loop1_57_type 1216 +#define _loop0_58_type 1217 +#define _loop1_59_type 1218 +#define _loop1_60_type 1219 +#define _loop1_61_type 1220 +#define _loop0_62_type 1221 +#define _loop1_63_type 1222 +#define _loop0_64_type 1223 +#define _loop1_65_type 1224 +#define _loop0_66_type 1225 +#define _loop1_67_type 1226 +#define _loop1_68_type 1227 +#define _tmp_69_type 1228 +#define _loop0_71_type 1229 +#define _gather_70_type 1230 +#define _loop1_72_type 1231 +#define _loop0_74_type 1232 +#define _gather_73_type 1233 +#define _loop1_75_type 1234 +#define _loop0_76_type 1235 +#define _loop0_77_type 1236 +#define _loop0_78_type 1237 +#define _loop1_79_type 1238 +#define _loop0_80_type 1239 +#define _loop1_81_type 1240 +#define _loop1_82_type 1241 +#define _loop1_83_type 1242 +#define _loop0_84_type 1243 +#define _loop1_85_type 1244 +#define _loop0_86_type 1245 +#define _loop1_87_type 1246 +#define _loop0_88_type 1247 +#define _loop1_89_type 1248 +#define _loop1_90_type 1249 +#define _loop1_91_type 1250 +#define _loop1_92_type 1251 +#define _tmp_93_type 1252 +#define _loop0_95_type 1253 +#define _gather_94_type 1254 +#define _tmp_96_type 1255 +#define _tmp_97_type 1256 +#define _tmp_98_type 1257 +#define _tmp_99_type 1258 +#define _loop1_100_type 1259 +#define _tmp_101_type 1260 +#define _tmp_102_type 1261 +#define _loop0_104_type 1262 +#define _gather_103_type 1263 +#define _loop1_105_type 1264 +#define _loop0_106_type 1265 +#define _loop0_107_type 1266 +#define _tmp_108_type 1267 +#define _tmp_109_type 1268 +#define _loop0_111_type 1269 +#define _gather_110_type 1270 +#define _loop0_113_type 1271 +#define _gather_112_type 1272 +#define _loop0_115_type 1273 +#define _gather_114_type 1274 +#define _loop0_117_type 1275 +#define _gather_116_type 1276 +#define _loop0_118_type 1277 +#define _loop0_120_type 1278 +#define _gather_119_type 1279 +#define _tmp_121_type 1280 +#define _loop0_123_type 1281 +#define _gather_122_type 1282 +#define _loop0_125_type 1283 +#define _gather_124_type 1284 +#define _tmp_126_type 1285 +#define _loop0_127_type 1286 +#define _tmp_128_type 1287 
+#define _loop0_129_type 1288 +#define _loop0_130_type 1289 +#define _tmp_131_type 1290 +#define _tmp_132_type 1291 +#define _loop0_133_type 1292 +#define _tmp_134_type 1293 +#define _loop0_135_type 1294 +#define _tmp_136_type 1295 +#define _tmp_137_type 1296 +#define _tmp_138_type 1297 +#define _tmp_139_type 1298 +#define _tmp_140_type 1299 +#define _tmp_141_type 1300 +#define _tmp_142_type 1301 +#define _tmp_143_type 1302 +#define _tmp_144_type 1303 +#define _tmp_145_type 1304 +#define _tmp_146_type 1305 +#define _tmp_147_type 1306 +#define _tmp_148_type 1307 +#define _tmp_149_type 1308 +#define _tmp_150_type 1309 +#define _tmp_151_type 1310 +#define _loop1_152_type 1311 +#define _loop1_153_type 1312 +#define _tmp_154_type 1313 +#define _tmp_155_type 1314 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -517,7 +520,6 @@ static expr_ty single_subscript_attribute_target_rule(Parser *p); static asdl_seq* del_targets_rule(Parser *p); static expr_ty del_target_rule(Parser *p); static expr_ty del_t_atom_rule(Parser *p); -static void *del_target_end_rule(Parser *p); static asdl_seq* targets_rule(Parser *p); static expr_ty target_rule(Parser *p); static expr_ty t_primary_rule(Parser *p); @@ -527,6 +529,7 @@ static void *incorrect_arguments_rule(Parser *p); static void *invalid_kwarg_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); +static void *invalid_del_stmt_rule(Parser *p); static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); static void *invalid_dict_comprehension_rule(Parser *p); @@ -535,7 +538,9 @@ static void *invalid_lambda_parameters_rule(Parser *p); static void *invalid_star_etc_rule(Parser *p); static void *invalid_lambda_star_etc_rule(Parser *p); static void *invalid_double_type_comments_rule(Parser *p); -static void *invalid_del_target_rule(Parser *p); +static void *invalid_with_item_rule(Parser *p); +static void *invalid_for_target_rule(Parser *p); +static void *invalid_group_rule(Parser *p); static void *invalid_import_from_targets_rule(Parser *p); static asdl_seq *_loop0_1_rule(Parser *p); static asdl_seq *_loop0_2_rule(Parser *p); @@ -566,112 +571,112 @@ static asdl_seq *_gather_25_rule(Parser *p); static asdl_seq *_loop0_28_rule(Parser *p); static asdl_seq *_gather_27_rule(Parser *p); static void *_tmp_29_rule(Parser *p); -static asdl_seq *_loop0_30_rule(Parser *p); -static asdl_seq *_loop1_31_rule(Parser *p); -static asdl_seq *_loop0_33_rule(Parser *p); -static asdl_seq *_gather_32_rule(Parser *p); -static void *_tmp_34_rule(Parser *p); -static asdl_seq *_loop0_36_rule(Parser *p); -static asdl_seq *_gather_35_rule(Parser *p); -static void *_tmp_37_rule(Parser *p); -static asdl_seq *_loop0_39_rule(Parser *p); -static asdl_seq *_gather_38_rule(Parser *p); -static asdl_seq *_loop0_41_rule(Parser *p); -static asdl_seq *_gather_40_rule(Parser *p); -static asdl_seq *_loop0_43_rule(Parser *p); -static asdl_seq *_gather_42_rule(Parser *p); -static asdl_seq *_loop0_45_rule(Parser *p); -static asdl_seq *_gather_44_rule(Parser *p); -static void *_tmp_46_rule(Parser *p); -static asdl_seq *_loop1_47_rule(Parser *p); -static void *_tmp_48_rule(Parser *p); +static void *_tmp_30_rule(Parser *p); +static asdl_seq *_loop0_31_rule(Parser *p); +static asdl_seq *_loop1_32_rule(Parser *p); +static asdl_seq *_loop0_34_rule(Parser *p); +static asdl_seq *_gather_33_rule(Parser *p); +static void *_tmp_35_rule(Parser *p); +static asdl_seq 
*_loop0_37_rule(Parser *p); +static asdl_seq *_gather_36_rule(Parser *p); +static void *_tmp_38_rule(Parser *p); +static asdl_seq *_loop0_40_rule(Parser *p); +static asdl_seq *_gather_39_rule(Parser *p); +static asdl_seq *_loop0_42_rule(Parser *p); +static asdl_seq *_gather_41_rule(Parser *p); +static asdl_seq *_loop0_44_rule(Parser *p); +static asdl_seq *_gather_43_rule(Parser *p); +static asdl_seq *_loop0_46_rule(Parser *p); +static asdl_seq *_gather_45_rule(Parser *p); +static void *_tmp_47_rule(Parser *p); +static asdl_seq *_loop1_48_rule(Parser *p); static void *_tmp_49_rule(Parser *p); static void *_tmp_50_rule(Parser *p); static void *_tmp_51_rule(Parser *p); static void *_tmp_52_rule(Parser *p); -static asdl_seq *_loop0_53_rule(Parser *p); +static void *_tmp_53_rule(Parser *p); static asdl_seq *_loop0_54_rule(Parser *p); static asdl_seq *_loop0_55_rule(Parser *p); -static asdl_seq *_loop1_56_rule(Parser *p); -static asdl_seq *_loop0_57_rule(Parser *p); -static asdl_seq *_loop1_58_rule(Parser *p); +static asdl_seq *_loop0_56_rule(Parser *p); +static asdl_seq *_loop1_57_rule(Parser *p); +static asdl_seq *_loop0_58_rule(Parser *p); static asdl_seq *_loop1_59_rule(Parser *p); static asdl_seq *_loop1_60_rule(Parser *p); -static asdl_seq *_loop0_61_rule(Parser *p); -static asdl_seq *_loop1_62_rule(Parser *p); -static asdl_seq *_loop0_63_rule(Parser *p); -static asdl_seq *_loop1_64_rule(Parser *p); -static asdl_seq *_loop0_65_rule(Parser *p); -static asdl_seq *_loop1_66_rule(Parser *p); +static asdl_seq *_loop1_61_rule(Parser *p); +static asdl_seq *_loop0_62_rule(Parser *p); +static asdl_seq *_loop1_63_rule(Parser *p); +static asdl_seq *_loop0_64_rule(Parser *p); +static asdl_seq *_loop1_65_rule(Parser *p); +static asdl_seq *_loop0_66_rule(Parser *p); static asdl_seq *_loop1_67_rule(Parser *p); -static void *_tmp_68_rule(Parser *p); -static asdl_seq *_loop0_70_rule(Parser *p); -static asdl_seq *_gather_69_rule(Parser *p); -static asdl_seq *_loop1_71_rule(Parser *p); -static asdl_seq *_loop0_73_rule(Parser *p); -static asdl_seq *_gather_72_rule(Parser *p); -static asdl_seq *_loop1_74_rule(Parser *p); -static asdl_seq *_loop0_75_rule(Parser *p); +static asdl_seq *_loop1_68_rule(Parser *p); +static void *_tmp_69_rule(Parser *p); +static asdl_seq *_loop0_71_rule(Parser *p); +static asdl_seq *_gather_70_rule(Parser *p); +static asdl_seq *_loop1_72_rule(Parser *p); +static asdl_seq *_loop0_74_rule(Parser *p); +static asdl_seq *_gather_73_rule(Parser *p); +static asdl_seq *_loop1_75_rule(Parser *p); static asdl_seq *_loop0_76_rule(Parser *p); static asdl_seq *_loop0_77_rule(Parser *p); -static asdl_seq *_loop1_78_rule(Parser *p); -static asdl_seq *_loop0_79_rule(Parser *p); -static asdl_seq *_loop1_80_rule(Parser *p); +static asdl_seq *_loop0_78_rule(Parser *p); +static asdl_seq *_loop1_79_rule(Parser *p); +static asdl_seq *_loop0_80_rule(Parser *p); static asdl_seq *_loop1_81_rule(Parser *p); static asdl_seq *_loop1_82_rule(Parser *p); -static asdl_seq *_loop0_83_rule(Parser *p); -static asdl_seq *_loop1_84_rule(Parser *p); -static asdl_seq *_loop0_85_rule(Parser *p); -static asdl_seq *_loop1_86_rule(Parser *p); -static asdl_seq *_loop0_87_rule(Parser *p); -static asdl_seq *_loop1_88_rule(Parser *p); +static asdl_seq *_loop1_83_rule(Parser *p); +static asdl_seq *_loop0_84_rule(Parser *p); +static asdl_seq *_loop1_85_rule(Parser *p); +static asdl_seq *_loop0_86_rule(Parser *p); +static asdl_seq *_loop1_87_rule(Parser *p); +static asdl_seq *_loop0_88_rule(Parser *p); static asdl_seq 
*_loop1_89_rule(Parser *p); static asdl_seq *_loop1_90_rule(Parser *p); static asdl_seq *_loop1_91_rule(Parser *p); -static void *_tmp_92_rule(Parser *p); -static asdl_seq *_loop0_94_rule(Parser *p); -static asdl_seq *_gather_93_rule(Parser *p); -static void *_tmp_95_rule(Parser *p); +static asdl_seq *_loop1_92_rule(Parser *p); +static void *_tmp_93_rule(Parser *p); +static asdl_seq *_loop0_95_rule(Parser *p); +static asdl_seq *_gather_94_rule(Parser *p); static void *_tmp_96_rule(Parser *p); static void *_tmp_97_rule(Parser *p); static void *_tmp_98_rule(Parser *p); -static asdl_seq *_loop1_99_rule(Parser *p); -static void *_tmp_100_rule(Parser *p); +static void *_tmp_99_rule(Parser *p); +static asdl_seq *_loop1_100_rule(Parser *p); static void *_tmp_101_rule(Parser *p); -static asdl_seq *_loop0_103_rule(Parser *p); -static asdl_seq *_gather_102_rule(Parser *p); -static asdl_seq *_loop1_104_rule(Parser *p); -static asdl_seq *_loop0_105_rule(Parser *p); +static void *_tmp_102_rule(Parser *p); +static asdl_seq *_loop0_104_rule(Parser *p); +static asdl_seq *_gather_103_rule(Parser *p); +static asdl_seq *_loop1_105_rule(Parser *p); static asdl_seq *_loop0_106_rule(Parser *p); -static void *_tmp_107_rule(Parser *p); +static asdl_seq *_loop0_107_rule(Parser *p); static void *_tmp_108_rule(Parser *p); -static asdl_seq *_loop0_110_rule(Parser *p); -static asdl_seq *_gather_109_rule(Parser *p); -static asdl_seq *_loop0_112_rule(Parser *p); -static asdl_seq *_gather_111_rule(Parser *p); -static asdl_seq *_loop0_114_rule(Parser *p); -static asdl_seq *_gather_113_rule(Parser *p); -static asdl_seq *_loop0_116_rule(Parser *p); -static asdl_seq *_gather_115_rule(Parser *p); +static void *_tmp_109_rule(Parser *p); +static asdl_seq *_loop0_111_rule(Parser *p); +static asdl_seq *_gather_110_rule(Parser *p); +static asdl_seq *_loop0_113_rule(Parser *p); +static asdl_seq *_gather_112_rule(Parser *p); +static asdl_seq *_loop0_115_rule(Parser *p); +static asdl_seq *_gather_114_rule(Parser *p); static asdl_seq *_loop0_117_rule(Parser *p); -static asdl_seq *_loop0_119_rule(Parser *p); -static asdl_seq *_gather_118_rule(Parser *p); -static void *_tmp_120_rule(Parser *p); -static asdl_seq *_loop0_122_rule(Parser *p); -static asdl_seq *_gather_121_rule(Parser *p); -static asdl_seq *_loop0_124_rule(Parser *p); -static asdl_seq *_gather_123_rule(Parser *p); -static void *_tmp_125_rule(Parser *p); -static asdl_seq *_loop0_126_rule(Parser *p); -static void *_tmp_127_rule(Parser *p); -static asdl_seq *_loop0_128_rule(Parser *p); +static asdl_seq *_gather_116_rule(Parser *p); +static asdl_seq *_loop0_118_rule(Parser *p); +static asdl_seq *_loop0_120_rule(Parser *p); +static asdl_seq *_gather_119_rule(Parser *p); +static void *_tmp_121_rule(Parser *p); +static asdl_seq *_loop0_123_rule(Parser *p); +static asdl_seq *_gather_122_rule(Parser *p); +static asdl_seq *_loop0_125_rule(Parser *p); +static asdl_seq *_gather_124_rule(Parser *p); +static void *_tmp_126_rule(Parser *p); +static asdl_seq *_loop0_127_rule(Parser *p); +static void *_tmp_128_rule(Parser *p); static asdl_seq *_loop0_129_rule(Parser *p); -static void *_tmp_130_rule(Parser *p); +static asdl_seq *_loop0_130_rule(Parser *p); static void *_tmp_131_rule(Parser *p); -static asdl_seq *_loop0_132_rule(Parser *p); -static void *_tmp_133_rule(Parser *p); -static asdl_seq *_loop0_134_rule(Parser *p); -static void *_tmp_135_rule(Parser *p); +static void *_tmp_132_rule(Parser *p); +static asdl_seq *_loop0_133_rule(Parser *p); +static void *_tmp_134_rule(Parser *p); 
+static asdl_seq *_loop0_135_rule(Parser *p); static void *_tmp_136_rule(Parser *p); static void *_tmp_137_rule(Parser *p); static void *_tmp_138_rule(Parser *p); @@ -687,10 +692,11 @@ static void *_tmp_147_rule(Parser *p); static void *_tmp_148_rule(Parser *p); static void *_tmp_149_rule(Parser *p); static void *_tmp_150_rule(Parser *p); -static asdl_seq *_loop1_151_rule(Parser *p); +static void *_tmp_151_rule(Parser *p); static asdl_seq *_loop1_152_rule(Parser *p); -static void *_tmp_153_rule(Parser *p); +static asdl_seq *_loop1_153_rule(Parser *p); static void *_tmp_154_rule(Parser *p); +static void *_tmp_155_rule(Parser *p); // file: statements? $ @@ -2006,7 +2012,7 @@ compound_stmt_rule(Parser *p) // | NAME ':' expression ['=' annotated_rhs] // | ('(' single_target ')' | single_subscript_attribute_target) ':' expression ['=' annotated_rhs] // | ((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT? -// | single_target augassign (yield_expr | star_expressions) +// | single_target augassign ~ (yield_expr | star_expressions) // | invalid_assignment static stmt_ty assignment_rule(Parser *p) @@ -2152,12 +2158,13 @@ assignment_rule(Parser *p) D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "((star_targets '='))+ (yield_expr | star_expressions) !'=' TYPE_COMMENT?")); } - { // single_target augassign (yield_expr | star_expressions) + { // single_target augassign ~ (yield_expr | star_expressions) if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + D(fprintf(stderr, "%*c> assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)")); + int _cut_var = 0; expr_ty a; AugOperator* b; void *c; @@ -2166,10 +2173,12 @@ assignment_rule(Parser *p) && (b = augassign_rule(p)) // augassign && + (_cut_var = 1) + && (c = _tmp_24_rule(p)) // yield_expr | star_expressions ) { - D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + D(fprintf(stderr, "%*c+ assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -2189,7 +2198,11 @@ assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "single_target augassign (yield_expr | star_expressions)")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "single_target augassign ~ (yield_expr | star_expressions)")); + if (_cut_var) { + D(p->level--); + return NULL; + } } { // invalid_assignment if (p->error_indicator) { @@ -2806,7 +2819,7 @@ assert_stmt_rule(Parser *p) return _res; } -// del_stmt: 'del' del_targets +// del_stmt: 'del' del_targets &(';' | NEWLINE) | invalid_del_stmt static stmt_ty del_stmt_rule(Parser *p) { @@ -2826,21 +2839,23 @@ del_stmt_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'del' del_targets + { // 'del' del_targets &(';' | NEWLINE) if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); + D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)")); Token * _keyword; asdl_seq* a; if ( (_keyword = _PyPegen_expect_token(p, 503)) // token='del' && (a = del_targets_rule(p)) // del_targets + && + _PyPegen_lookahead(1, _tmp_30_rule, p) ) { - D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets")); + D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' del_targets &(';' | NEWLINE)")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -2860,7 +2875,26 @@ del_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s del_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'del' del_targets")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'del' del_targets &(';' | NEWLINE)")); + } + { // invalid_del_stmt + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_del_stmt")); + void *invalid_del_stmt_var; + if ( + (invalid_del_stmt_var = invalid_del_stmt_rule(p)) // invalid_del_stmt + ) + { + D(fprintf(stderr, "%*c+ del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_del_stmt")); + _res = invalid_del_stmt_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s del_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_del_stmt")); } _res = NULL; done: @@ -3021,7 +3055,7 @@ import_from_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop0_30_rule(p)) // (('.' | '...'))* + (a = _loop0_31_rule(p)) // (('.' | '...'))* && (b = dotted_name_rule(p)) // dotted_name && @@ -3065,7 +3099,7 @@ import_from_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 514)) // token='from' && - (a = _loop1_31_rule(p)) // (('.' | '...'))+ + (a = _loop1_32_rule(p)) // (('.' 
| '...'))+ && (_keyword_1 = _PyPegen_expect_token(p, 513)) // token='import' && @@ -3238,7 +3272,7 @@ import_from_as_names_rule(Parser *p) D(fprintf(stderr, "%*c> import_from_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); asdl_seq * a; if ( - (a = _gather_32_rule(p)) // ','.import_from_as_name+ + (a = _gather_33_rule(p)) // ','.import_from_as_name+ ) { D(fprintf(stderr, "%*c+ import_from_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.import_from_as_name+")); @@ -3282,7 +3316,7 @@ import_from_as_name_rule(Parser *p) if ( (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_34_rule(p), 1) // ['as' NAME] + (b = _tmp_35_rule(p), 1) // ['as' NAME] ) { D(fprintf(stderr, "%*c+ import_from_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ['as' NAME]")); @@ -3323,7 +3357,7 @@ dotted_as_names_rule(Parser *p) D(fprintf(stderr, "%*c> dotted_as_names[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); asdl_seq * a; if ( - (a = _gather_35_rule(p)) // ','.dotted_as_name+ + (a = _gather_36_rule(p)) // ','.dotted_as_name+ ) { D(fprintf(stderr, "%*c+ dotted_as_names[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.dotted_as_name+")); @@ -3367,7 +3401,7 @@ dotted_as_name_rule(Parser *p) if ( (a = dotted_name_rule(p)) // dotted_name && - (b = _tmp_37_rule(p), 1) // ['as' NAME] + (b = _tmp_38_rule(p), 1) // ['as' NAME] ) { D(fprintf(stderr, "%*c+ dotted_as_name[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_name ['as' NAME]")); @@ -3840,8 +3874,9 @@ while_stmt_rule(Parser *p) } // for_stmt: -// | 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? -// | ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? +// | 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? +// | ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? +// | invalid_for_target static stmt_ty for_stmt_rule(Parser *p) { @@ -3861,12 +3896,13 @@ for_stmt_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + { // 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + int _cut_var = 0; Token * _keyword; Token * _keyword_1; Token * _literal; @@ -3882,6 +3918,8 @@ for_stmt_rule(Parser *p) && (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && + (_cut_var = 1) + && (ex = star_expressions_rule(p)) // star_expressions && (_literal = _PyPegen_expect_token(p, 11)) // token=':' @@ -3893,7 +3931,7 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? 
block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -3913,14 +3951,19 @@ for_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + if (_cut_var) { + D(p->level--); + return NULL; + } } - { // ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block? + { // ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block? if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + int _cut_var = 0; Token * _keyword; Token * _keyword_1; Token * _literal; @@ -3939,6 +3982,8 @@ for_stmt_rule(Parser *p) && (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && + (_cut_var = 1) + && (ex = star_expressions_rule(p)) // star_expressions && (_literal = _PyPegen_expect_token(p, 11)) // token=':' @@ -3950,7 +3995,7 @@ for_stmt_rule(Parser *p) (el = else_block_rule(p), 1) // else_block? ) { - D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -3970,7 +4015,30 @@ for_stmt_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' star_expressions ':' TYPE_COMMENT? block else_block?")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ star_expressions ':' TYPE_COMMENT? block else_block?")); + if (_cut_var) { + D(p->level--); + return NULL; + } + } + { // invalid_for_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_for_target")); + void *invalid_for_target_var; + if ( + (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target + ) + { + D(fprintf(stderr, "%*c+ for_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_for_target")); + _res = invalid_for_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_for_target")); } _res = NULL; done: @@ -4021,7 +4089,7 @@ with_stmt_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_38_rule(p)) // ','.with_item+ + (a = _gather_39_rule(p)) // ','.with_item+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
&& @@ -4068,7 +4136,7 @@ with_stmt_rule(Parser *p) if ( (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_40_rule(p)) // ','.with_item+ + (a = _gather_41_rule(p)) // ','.with_item+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4121,7 +4189,7 @@ with_stmt_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _gather_42_rule(p)) // ','.with_item+ + (a = _gather_43_rule(p)) // ','.with_item+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? && @@ -4171,7 +4239,7 @@ with_stmt_rule(Parser *p) && (_keyword = _PyPegen_expect_token(p, 519)) // token='with' && - (a = _gather_44_rule(p)) // ','.with_item+ + (a = _gather_45_rule(p)) // ','.with_item+ && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4208,7 +4276,7 @@ with_stmt_rule(Parser *p) return _res; } -// with_item: expression ['as' target] +// with_item: expression 'as' target &(',' | ')' | ':') | invalid_with_item | expression static withitem_ty with_item_rule(Parser *p) { @@ -4219,22 +4287,70 @@ with_item_rule(Parser *p) } withitem_ty _res = NULL; int _mark = p->mark; - { // expression ['as' target] + { // expression 'as' target &(',' | ')' | ':') if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); + D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression 'as' target &(',' | ')' | ':')")); + Token * _keyword; expr_ty e; - void *o; + expr_ty t; if ( (e = expression_rule(p)) // expression && - (o = _tmp_46_rule(p), 1) // ['as' target] + (_keyword = _PyPegen_expect_token(p, 520)) // token='as' + && + (t = target_rule(p)) // target + && + _PyPegen_lookahead(1, _tmp_47_rule, p) + ) + { + D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' target &(',' | ')' | ':')")); + _res = _Py_withitem ( e , t , p -> arena ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression 'as' target &(',' | ')' | ':')")); + } + { // invalid_with_item + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_with_item")); + void *invalid_with_item_var; + if ( + (invalid_with_item_var = invalid_with_item_rule(p)) // invalid_with_item + ) + { + D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_with_item")); + _res = invalid_with_item_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_with_item")); + } + { // expression + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression")); + expr_ty e; + if ( + (e = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ['as' target]")); - _res = _Py_withitem ( e , o , p -> arena ); + D(fprintf(stderr, "%*c+ with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression")); + _res = _Py_withitem ( e , NULL , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -4244,7 +4360,7 @@ with_item_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s with_item[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ['as' target]")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression")); } _res = NULL; done: @@ -4335,7 +4451,7 @@ try_stmt_rule(Parser *p) && (b = block_rule(p)) // block && - (ex = _loop1_47_rule(p)) // except_block+ + (ex = _loop1_48_rule(p)) // except_block+ && (el = else_block_rule(p), 1) // else_block? && @@ -4402,11 +4518,11 @@ except_block_rule(Parser *p) expr_ty e; void *t; if ( - (_keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 521)) // token='except' && (e = expression_rule(p)) // expression && - (t = _tmp_48_rule(p), 1) // ['as' NAME] + (t = _tmp_49_rule(p), 1) // ['as' NAME] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4445,7 +4561,7 @@ except_block_rule(Parser *p) Token * _literal; asdl_seq* b; if ( - (_keyword = _PyPegen_expect_token(p, 520)) // token='except' + (_keyword = _PyPegen_expect_token(p, 521)) // token='except' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4501,7 +4617,7 @@ finally_block_rule(Parser *p) Token * _literal; asdl_seq* a; if ( - (_keyword = _PyPegen_expect_token(p, 521)) // token='finally' + (_keyword = _PyPegen_expect_token(p, 522)) // token='finally' && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4623,7 +4739,7 @@ raise_stmt_rule(Parser *p) && (a = expression_rule(p)) // expression && - (b = _tmp_49_rule(p), 1) // ['from' expression] + (b = _tmp_50_rule(p), 1) // ['from' expression] ) { D(fprintf(stderr, "%*c+ raise_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'raise' expression ['from' expression]")); @@ -4788,7 +4904,7 @@ function_def_raw_rule(Parser *p) void *params; void *tc; if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 523)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4798,7 +4914,7 @@ function_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_50_rule(p), 1) // ['->' expression] + (a = _tmp_51_rule(p), 1) // ['->' expression] && (_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4848,7 +4964,7 @@ function_def_raw_rule(Parser *p) if ( (async_var = _PyPegen_expect_token(p, ASYNC)) // token='ASYNC' && - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 523)) // token='def' && (n = _PyPegen_name_token(p)) // NAME && @@ -4858,7 +4974,7 @@ function_def_raw_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' && - (a = _tmp_51_rule(p), 1) // ['->' expression] + (a = _tmp_52_rule(p), 1) // ['->' expression] && 
(_literal_2 = _PyPegen_expect_token(p, 11)) // token=':' && @@ -4922,7 +5038,7 @@ func_type_comment_rule(Parser *p) && (t = _PyPegen_expect_token(p, TYPE_COMMENT)) // token='TYPE_COMMENT' && - _PyPegen_lookahead(1, _tmp_52_rule, p) + _PyPegen_lookahead(1, _tmp_53_rule, p) ) { D(fprintf(stderr, "%*c+ func_type_comment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE TYPE_COMMENT &(NEWLINE INDENT)")); @@ -5066,9 +5182,9 @@ parameters_rule(Parser *p) if ( (a = slash_no_default_rule(p)) // slash_no_default && - (b = _loop0_53_rule(p)) // param_no_default* + (b = _loop0_54_rule(p)) // param_no_default* && - (c = _loop0_54_rule(p)) // param_with_default* + (c = _loop0_55_rule(p)) // param_with_default* && (d = star_etc_rule(p), 1) // star_etc? ) @@ -5098,7 +5214,7 @@ parameters_rule(Parser *p) if ( (a = slash_with_default_rule(p)) // slash_with_default && - (b = _loop0_55_rule(p)) // param_with_default* + (b = _loop0_56_rule(p)) // param_with_default* && (c = star_etc_rule(p), 1) // star_etc? ) @@ -5126,9 +5242,9 @@ parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_56_rule(p)) // param_no_default+ + (a = _loop1_57_rule(p)) // param_no_default+ && - (b = _loop0_57_rule(p)) // param_with_default* + (b = _loop0_58_rule(p)) // param_with_default* && (c = star_etc_rule(p), 1) // star_etc? ) @@ -5155,7 +5271,7 @@ parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_58_rule(p)) // param_with_default+ + (a = _loop1_59_rule(p)) // param_with_default+ && (b = star_etc_rule(p), 1) // star_etc? ) @@ -5224,7 +5340,7 @@ slash_no_default_rule(Parser *p) Token * _literal_1; asdl_seq * a; if ( - (a = _loop1_59_rule(p)) // param_no_default+ + (a = _loop1_60_rule(p)) // param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -5253,7 +5369,7 @@ slash_no_default_rule(Parser *p) Token * _literal; asdl_seq * a; if ( - (a = _loop1_60_rule(p)) // param_no_default+ + (a = _loop1_61_rule(p)) // param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -5303,9 +5419,9 @@ slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_61_rule(p)) // param_no_default* + (a = _loop0_62_rule(p)) // param_no_default* && - (b = _loop1_62_rule(p)) // param_with_default+ + (b = _loop1_63_rule(p)) // param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -5335,9 +5451,9 @@ slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_63_rule(p)) // param_no_default* + (a = _loop0_64_rule(p)) // param_no_default* && - (b = _loop1_64_rule(p)) // param_with_default+ + (b = _loop1_65_rule(p)) // param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -5393,7 +5509,7 @@ star_etc_rule(Parser *p) && (a = param_no_default_rule(p)) // param_no_default && - (b = _loop0_65_rule(p)) // param_maybe_default* + (b = _loop0_66_rule(p)) // param_maybe_default* && (c = kwds_rule(p), 1) // kwds? ) @@ -5426,7 +5542,7 @@ star_etc_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_66_rule(p)) // param_maybe_default+ + (b = _loop1_67_rule(p)) // param_maybe_default+ && (c = kwds_rule(p), 1) // kwds? 
) @@ -5948,7 +6064,7 @@ decorators_rule(Parser *p) D(fprintf(stderr, "%*c> decorators[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); asdl_seq * a; if ( - (a = _loop1_67_rule(p)) // (('@' named_expression NEWLINE))+ + (a = _loop1_68_rule(p)) // (('@' named_expression NEWLINE))+ ) { D(fprintf(stderr, "%*c+ decorators[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "(('@' named_expression NEWLINE))+")); @@ -6065,11 +6181,11 @@ class_def_raw_rule(Parser *p) void *b; asdl_seq* c; if ( - (_keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 524)) // token='class' && (a = _PyPegen_name_token(p)) // NAME && - (b = _tmp_68_rule(p), 1) // ['(' arguments? ')'] + (b = _tmp_69_rule(p), 1) // ['(' arguments? ')'] && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && @@ -6218,7 +6334,7 @@ expressions_list_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_69_rule(p)) // ','.star_expression+ + (a = _gather_70_rule(p)) // ','.star_expression+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -6278,7 +6394,7 @@ star_expressions_rule(Parser *p) if ( (a = star_expression_rule(p)) // star_expression && - (b = _loop1_71_rule(p)) // ((',' star_expression))+ + (b = _loop1_72_rule(p)) // ((',' star_expression))+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -6473,7 +6589,7 @@ star_named_expressions_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_72_rule(p)) // ','.star_named_expression+ + (a = _gather_73_rule(p)) // ','.star_named_expression+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -6578,7 +6694,7 @@ star_named_expression_rule(Parser *p) return _res; } -// named_expression: NAME ':=' expression | expression !':=' | invalid_named_expression +// named_expression: NAME ':=' ~ expression | expression !':=' | invalid_named_expression static expr_ty named_expression_rule(Parser *p) { @@ -6598,12 +6714,13 @@ named_expression_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME ':=' expression + { // NAME ':=' ~ expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); + D(fprintf(stderr, "%*c> named_expression[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME ':=' ~ expression")); + int _cut_var = 0; Token * _literal; expr_ty a; expr_ty b; @@ -6612,10 +6729,12 @@ named_expression_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 53)) // token=':=' && + (_cut_var = 1) + && (b = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':=' expression")); + D(fprintf(stderr, "%*c+ named_expression[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME ':=' ~ expression")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -6635,7 +6754,11 @@ named_expression_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s named_expression[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME ':=' expression")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "NAME ':=' ~ expression")); + if (_cut_var) { + D(p->level--); + return NULL; + } } { // expression !':=' if (p->error_indicator) { @@ -6771,7 +6894,7 @@ expressions_rule(Parser *p) if ( (a = expression_rule(p)) // expression && - (b = _loop1_74_rule(p)) // ((',' expression))+ + (b = _loop1_75_rule(p)) // ((',' expression))+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -7004,7 +7127,7 @@ lambdef_rule(Parser *p) void *a; expr_ty b; if ( - (_keyword = _PyPegen_expect_token(p, 524)) // token='lambda' + (_keyword = _PyPegen_expect_token(p, 525)) // token='lambda' && (a = lambda_params_rule(p), 1) // lambda_params? && @@ -7125,9 +7248,9 @@ lambda_parameters_rule(Parser *p) if ( (a = lambda_slash_no_default_rule(p)) // lambda_slash_no_default && - (b = _loop0_75_rule(p)) // lambda_param_no_default* + (b = _loop0_76_rule(p)) // lambda_param_no_default* && - (c = _loop0_76_rule(p)) // lambda_param_with_default* + (c = _loop0_77_rule(p)) // lambda_param_with_default* && (d = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) @@ -7157,7 +7280,7 @@ lambda_parameters_rule(Parser *p) if ( (a = lambda_slash_with_default_rule(p)) // lambda_slash_with_default && - (b = _loop0_77_rule(p)) // lambda_param_with_default* + (b = _loop0_78_rule(p)) // lambda_param_with_default* && (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) @@ -7185,9 +7308,9 @@ lambda_parameters_rule(Parser *p) asdl_seq * b; void *c; if ( - (a = _loop1_78_rule(p)) // lambda_param_no_default+ + (a = _loop1_79_rule(p)) // lambda_param_no_default+ && - (b = _loop0_79_rule(p)) // lambda_param_with_default* + (b = _loop0_80_rule(p)) // lambda_param_with_default* && (c = lambda_star_etc_rule(p), 1) // lambda_star_etc? ) @@ -7214,7 +7337,7 @@ lambda_parameters_rule(Parser *p) asdl_seq * a; void *b; if ( - (a = _loop1_80_rule(p)) // lambda_param_with_default+ + (a = _loop1_81_rule(p)) // lambda_param_with_default+ && (b = lambda_star_etc_rule(p), 1) // lambda_star_etc? 
) @@ -7285,7 +7408,7 @@ lambda_slash_no_default_rule(Parser *p) Token * _literal_1; asdl_seq * a; if ( - (a = _loop1_81_rule(p)) // lambda_param_no_default+ + (a = _loop1_82_rule(p)) // lambda_param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -7314,7 +7437,7 @@ lambda_slash_no_default_rule(Parser *p) Token * _literal; asdl_seq * a; if ( - (a = _loop1_82_rule(p)) // lambda_param_no_default+ + (a = _loop1_83_rule(p)) // lambda_param_no_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -7364,9 +7487,9 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_83_rule(p)) // lambda_param_no_default* + (a = _loop0_84_rule(p)) // lambda_param_no_default* && - (b = _loop1_84_rule(p)) // lambda_param_with_default+ + (b = _loop1_85_rule(p)) // lambda_param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -7396,9 +7519,9 @@ lambda_slash_with_default_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _loop0_85_rule(p)) // lambda_param_no_default* + (a = _loop0_86_rule(p)) // lambda_param_no_default* && - (b = _loop1_86_rule(p)) // lambda_param_with_default+ + (b = _loop1_87_rule(p)) // lambda_param_with_default+ && (_literal = _PyPegen_expect_token(p, 17)) // token='/' && @@ -7454,7 +7577,7 @@ lambda_star_etc_rule(Parser *p) && (a = lambda_param_no_default_rule(p)) // lambda_param_no_default && - (b = _loop0_87_rule(p)) // lambda_param_maybe_default* + (b = _loop0_88_rule(p)) // lambda_param_maybe_default* && (c = lambda_kwds_rule(p), 1) // lambda_kwds? ) @@ -7487,7 +7610,7 @@ lambda_star_etc_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _loop1_88_rule(p)) // lambda_param_maybe_default+ + (b = _loop1_89_rule(p)) // lambda_param_maybe_default+ && (c = lambda_kwds_rule(p), 1) // lambda_kwds? 
) @@ -7914,7 +8037,7 @@ disjunction_rule(Parser *p) if ( (a = conjunction_rule(p)) // conjunction && - (b = _loop1_89_rule(p)) // (('or' conjunction))+ + (b = _loop1_90_rule(p)) // (('or' conjunction))+ ) { D(fprintf(stderr, "%*c+ disjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "conjunction (('or' conjunction))+")); @@ -8000,7 +8123,7 @@ conjunction_rule(Parser *p) if ( (a = inversion_rule(p)) // inversion && - (b = _loop1_90_rule(p)) // (('and' inversion))+ + (b = _loop1_91_rule(p)) // (('and' inversion))+ ) { D(fprintf(stderr, "%*c+ conjunction[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "inversion (('and' inversion))+")); @@ -8084,7 +8207,7 @@ inversion_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 526)) // token='not' && (a = inversion_rule(p)) // inversion ) @@ -8168,7 +8291,7 @@ comparison_rule(Parser *p) if ( (a = bitwise_or_rule(p)) // bitwise_or && - (b = _loop1_91_rule(p)) // compare_op_bitwise_or_pair+ + (b = _loop1_92_rule(p)) // compare_op_bitwise_or_pair+ ) { D(fprintf(stderr, "%*c+ comparison[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "bitwise_or compare_op_bitwise_or_pair+")); @@ -8496,10 +8619,10 @@ noteq_bitwise_or_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> noteq_bitwise_or[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('!=') bitwise_or")); - void *_tmp_92_var; + void *_tmp_93_var; expr_ty a; if ( - (_tmp_92_var = _tmp_92_rule(p)) // '!=' + (_tmp_93_var = _tmp_93_rule(p)) // '!=' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -8720,7 +8843,7 @@ notin_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword = _PyPegen_expect_token(p, 526)) // token='not' && (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && @@ -8811,9 +8934,9 @@ isnot_bitwise_or_rule(Parser *p) Token * _keyword_1; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = _PyPegen_expect_token(p, 527)) // token='is' && - (_keyword_1 = _PyPegen_expect_token(p, 525)) // token='not' + (_keyword_1 = _PyPegen_expect_token(p, 526)) // token='not' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -8857,7 +8980,7 @@ is_bitwise_or_rule(Parser *p) Token * _keyword; expr_ty a; if ( - (_keyword = _PyPegen_expect_token(p, 526)) // token='is' + (_keyword = _PyPegen_expect_token(p, 527)) // token='is' && (a = bitwise_or_rule(p)) // bitwise_or ) @@ -10428,7 +10551,7 @@ slices_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_93_rule(p)) // ','.slice+ + (a = _gather_94_rule(p)) // ','.slice+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -10498,7 +10621,7 @@ slice_rule(Parser *p) && (b = expression_rule(p), 1) // expression? && - (c = _tmp_95_rule(p), 1) // [':' expression?] + (c = _tmp_96_rule(p), 1) // [':' expression?] ) { D(fprintf(stderr, "%*c+ slice[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression? ':' expression? 
[':' expression?]")); @@ -10610,7 +10733,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'True'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 527)) // token='True' + (_keyword = _PyPegen_expect_token(p, 528)) // token='True' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'True'")); @@ -10643,7 +10766,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'False'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 528)) // token='False' + (_keyword = _PyPegen_expect_token(p, 529)) // token='False' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'False'")); @@ -10676,7 +10799,7 @@ atom_rule(Parser *p) D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'None'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 529)) // token='None' + (_keyword = _PyPegen_expect_token(p, 530)) // token='None' ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'None'")); @@ -10747,15 +10870,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - void *_tmp_96_var; + void *_tmp_97_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 7) // token='(' && - (_tmp_96_var = _tmp_96_rule(p)) // tuple | group | genexp + (_tmp_97_var = _tmp_97_rule(p)) // tuple | group | genexp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'(' (tuple | group | genexp)")); - _res = _tmp_96_var; + _res = _tmp_97_var; goto done; } p->mark = _mark; @@ -10768,15 +10891,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - void *_tmp_97_var; + void *_tmp_98_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 9) // token='[' && - (_tmp_97_var = _tmp_97_rule(p)) // list | listcomp + (_tmp_98_var = _tmp_98_rule(p)) // list | listcomp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'[' (list | listcomp)")); - _res = _tmp_97_var; + _res = _tmp_98_var; goto done; } p->mark = _mark; @@ -10789,15 +10912,15 @@ atom_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - void *_tmp_98_var; + void *_tmp_99_var; if ( _PyPegen_lookahead_with_int(1, _PyPegen_expect_token, p, 25) // token='{' && - (_tmp_98_var = _tmp_98_rule(p)) // dict | set | dictcomp | setcomp + (_tmp_99_var = _tmp_99_rule(p)) // dict | set | dictcomp | setcomp ) { D(fprintf(stderr, "%*c+ atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "&'{' (dict | set | dictcomp | setcomp)")); - _res = _tmp_98_var; + _res = _tmp_99_var; goto done; } p->mark = _mark; @@ -10866,7 +10989,7 @@ strings_rule(Parser *p) D(fprintf(stderr, "%*c> strings[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING+")); asdl_seq * a; if ( - (a = _loop1_99_rule(p)) // STRING+ + (a = _loop1_100_rule(p)) // STRING+ ) { D(fprintf(stderr, "%*c+ strings[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "STRING+")); @@ -10954,7 +11077,7 @@ list_rule(Parser *p) return _res; } -// listcomp: '[' named_expression for_if_clauses ']' | invalid_comprehension +// listcomp: '[' named_expression ~ for_if_clauses ']' | 
invalid_comprehension static expr_ty listcomp_rule(Parser *p) { @@ -10974,12 +11097,13 @@ listcomp_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '[' named_expression for_if_clauses ']' + { // '[' named_expression ~ for_if_clauses ']' if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + D(fprintf(stderr, "%*c> listcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'[' named_expression ~ for_if_clauses ']'")); + int _cut_var = 0; Token * _literal; Token * _literal_1; expr_ty a; @@ -10989,12 +11113,14 @@ listcomp_rule(Parser *p) && (a = named_expression_rule(p)) // named_expression && + (_cut_var = 1) + && (b = for_if_clauses_rule(p)) // for_if_clauses && (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' ) { - D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + D(fprintf(stderr, "%*c+ listcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'[' named_expression ~ for_if_clauses ']'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -11014,7 +11140,11 @@ listcomp_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s listcomp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' named_expression for_if_clauses ']'")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' named_expression ~ for_if_clauses ']'")); + if (_cut_var) { + D(p->level--); + return NULL; + } } { // invalid_comprehension if (p->error_indicator) { @@ -11073,7 +11203,7 @@ tuple_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_100_rule(p), 1) // [star_named_expression ',' star_named_expressions?] + (a = _tmp_101_rule(p), 1) // [star_named_expression ',' star_named_expressions?] && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -11106,7 +11236,7 @@ tuple_rule(Parser *p) return _res; } -// group: '(' (yield_expr | named_expression) ')' +// group: '(' (yield_expr | named_expression) ')' | invalid_group static expr_ty group_rule(Parser *p) { @@ -11129,7 +11259,7 @@ group_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' && - (a = _tmp_101_rule(p)) // yield_expr | named_expression + (a = _tmp_102_rule(p)) // yield_expr | named_expression && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) @@ -11147,13 +11277,32 @@ group_rule(Parser *p) D(fprintf(stderr, "%*c%s group[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' (yield_expr | named_expression) ')'")); } + { // invalid_group + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> group[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_group")); + void *invalid_group_var; + if ( + (invalid_group_var = invalid_group_rule(p)) // invalid_group + ) + { + D(fprintf(stderr, "%*c+ group[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_group")); + _res = invalid_group_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s group[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_group")); + } _res = NULL; done: D(p->level--); return _res; } -// genexp: '(' expression for_if_clauses ')' | invalid_comprehension +// genexp: '(' expression ~ for_if_clauses ')' | invalid_comprehension static expr_ty genexp_rule(Parser *p) { @@ -11173,12 +11322,13 @@ genexp_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '(' expression for_if_clauses ')' + { // '(' expression ~ for_if_clauses ')' if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); + D(fprintf(stderr, "%*c> genexp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' expression ~ for_if_clauses ')'")); + int _cut_var = 0; Token * _literal; Token * _literal_1; expr_ty a; @@ -11188,12 +11338,14 @@ genexp_rule(Parser *p) && (a = expression_rule(p)) // expression && + (_cut_var = 1) + && (b = for_if_clauses_rule(p)) // for_if_clauses && (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' expression for_if_clauses ')'")); + D(fprintf(stderr, "%*c+ genexp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' expression ~ for_if_clauses ')'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -11213,7 +11365,11 @@ genexp_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s genexp[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' expression for_if_clauses ')'")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' expression ~ for_if_clauses ')'")); + if (_cut_var) { + D(p->level--); + return NULL; + } } { // invalid_comprehension if (p->error_indicator) { @@ -11305,7 +11461,7 @@ set_rule(Parser *p) return _res; } -// setcomp: '{' expression for_if_clauses '}' | invalid_comprehension +// setcomp: '{' expression ~ for_if_clauses '}' | invalid_comprehension static expr_ty setcomp_rule(Parser *p) { @@ -11325,12 +11481,13 @@ setcomp_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // '{' expression for_if_clauses '}' + { // '{' expression ~ for_if_clauses '}' if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); + D(fprintf(stderr, "%*c> setcomp[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{' expression ~ for_if_clauses '}'")); + int _cut_var = 0; Token * _literal; Token * _literal_1; expr_ty a; @@ -11340,12 +11497,14 @@ setcomp_rule(Parser *p) && (a = expression_rule(p)) // expression && + (_cut_var = 1) + && (b = for_if_clauses_rule(p)) // for_if_clauses && (_literal_1 = _PyPegen_expect_token(p, 26)) // token='}' ) { - D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expression for_if_clauses '}'")); + D(fprintf(stderr, "%*c+ setcomp[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{' expression ~ for_if_clauses '}'")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -11365,7 +11524,11 @@ setcomp_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s setcomp[%d-%d]: %s 
failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' expression for_if_clauses '}'")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{' expression ~ for_if_clauses '}'")); + if (_cut_var) { + D(p->level--); + return NULL; + } } { // invalid_comprehension if (p->error_indicator) { @@ -11565,7 +11728,7 @@ double_starred_kvpairs_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_102_rule(p)) // ','.double_starred_kvpair+ + (a = _gather_103_rule(p)) // ','.double_starred_kvpair+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -11716,13 +11879,13 @@ for_if_clauses_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> for_if_clauses[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); - asdl_seq * _loop1_104_var; + asdl_seq * _loop1_105_var; if ( - (_loop1_104_var = _loop1_104_rule(p)) // for_if_clause+ + (_loop1_105_var = _loop1_105_rule(p)) // for_if_clause+ ) { D(fprintf(stderr, "%*c+ for_if_clauses[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "for_if_clause+")); - _res = _loop1_104_var; + _res = _loop1_105_var; goto done; } p->mark = _mark; @@ -11736,8 +11899,9 @@ for_if_clauses_rule(Parser *p) } // for_if_clause: -// | ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* -// | 'for' star_targets 'in' disjunction (('if' disjunction))* +// | ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* +// | 'for' star_targets 'in' ~ disjunction (('if' disjunction))* +// | invalid_for_target static comprehension_ty for_if_clause_rule(Parser *p) { @@ -11748,12 +11912,13 @@ for_if_clause_rule(Parser *p) } comprehension_ty _res = NULL; int _mark = p->mark; - { // ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))* + { // ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))* if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + int _cut_var = 0; Token * _keyword; Token * _keyword_1; expr_ty a; @@ -11769,12 +11934,14 @@ for_if_clause_rule(Parser *p) && (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && + (_cut_var = 1) + && (b = disjunction_rule(p)) // disjunction && - (c = _loop0_105_rule(p)) // (('if' disjunction))* + (c = _loop0_106_rule(p)) // (('if' disjunction))* ) { - D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); _res = CHECK_VERSION ( 6 , "Async comprehensions are" , _Py_comprehension ( a , b , c , 1 , p -> arena ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -11785,14 +11952,19 @@ for_if_clause_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' disjunction (('if' disjunction))*")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "ASYNC 'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + if (_cut_var) { + D(p->level--); + return NULL; + } } - { // 'for' star_targets 'in' disjunction (('if' disjunction))* + { // 'for' star_targets 'in' ~ disjunction (('if' disjunction))* if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + int _cut_var = 0; Token * _keyword; Token * _keyword_1; expr_ty a; @@ -11805,12 +11977,14 @@ for_if_clause_rule(Parser *p) && (_keyword_1 = _PyPegen_expect_token(p, 518)) // token='in' && + (_cut_var = 1) + && (b = disjunction_rule(p)) // disjunction && - (c = _loop0_106_rule(p)) // (('if' disjunction))* + (c = _loop0_107_rule(p)) // (('if' disjunction))* ) { - D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); _res = _Py_comprehension ( a , b , c , 0 , p -> arena ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -11821,7 +11995,30 @@ for_if_clause_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' disjunction (('if' disjunction))*")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'for' star_targets 'in' ~ disjunction (('if' disjunction))*")); + if (_cut_var) { + D(p->level--); + return NULL; + } + } + { // invalid_for_target + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> for_if_clause[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_for_target")); + void *invalid_for_target_var; + if ( + (invalid_for_target_var = invalid_for_target_rule(p)) // invalid_for_target + ) + { + D(fprintf(stderr, "%*c+ for_if_clause[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_for_target")); + _res = invalid_for_target_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s for_if_clause[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_for_target")); } _res = NULL; done: @@ -12032,7 +12229,7 @@ args_rule(Parser *p) if ( (a = starred_expression_rule(p)) // starred_expression && - (b = _tmp_107_rule(p), 1) // [',' args] + (b = _tmp_108_rule(p), 1) // [',' args] ) { D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "starred_expression [',' args]")); @@ -12101,7 +12298,7 @@ args_rule(Parser *p) if ( (a = named_expression_rule(p)) // named_expression && - (b = _tmp_108_rule(p), 1) // [',' args] + (b = _tmp_109_rule(p), 1) // [',' args] ) { D(fprintf(stderr, "%*c+ args[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression [',' args]")); @@ -12156,11 +12353,11 @@ kwargs_rule(Parser *p) asdl_seq * a; asdl_seq * b; if ( - (a = _gather_109_rule(p)) // ','.kwarg_or_starred+ + (a = _gather_110_rule(p)) // ','.kwarg_or_starred+ && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (b = _gather_111_rule(p)) // ','.kwarg_or_double_starred+ + (b = _gather_112_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+ ',' ','.kwarg_or_double_starred+")); @@ -12182,13 +12379,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - asdl_seq * _gather_113_var; + asdl_seq * _gather_114_var; if ( - (_gather_113_var = _gather_113_rule(p)) // ','.kwarg_or_starred+ + (_gather_114_var = _gather_114_rule(p)) // ','.kwarg_or_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_starred+")); - _res = _gather_113_var; + _res = _gather_114_var; goto done; } p->mark = _mark; @@ -12201,13 +12398,13 @@ kwargs_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> kwargs[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - asdl_seq * _gather_115_var; + asdl_seq * _gather_116_var; if ( - (_gather_115_var = _gather_115_rule(p)) // ','.kwarg_or_double_starred+ + (_gather_116_var = _gather_116_rule(p)) // ','.kwarg_or_double_starred+ ) { D(fprintf(stderr, "%*c+ kwargs[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','.kwarg_or_double_starred+")); - _res = _gather_115_var; + _res = _gather_116_var; goto done; } p->mark = _mark; @@ -12569,7 +12766,7 @@ star_targets_rule(Parser *p) if ( (a = star_target_rule(p)) // star_target && - (b = _loop0_117_rule(p)) // ((',' star_target))* + (b = _loop0_118_rule(p)) // ((',' star_target))* && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -12623,7 +12820,7 @@ star_targets_seq_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_118_rule(p)) // ','.star_target+ + (a = _gather_119_rule(p)) // ','.star_target+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? ) @@ -12686,7 +12883,7 @@ star_target_rule(Parser *p) if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (a = _tmp_120_rule(p)) // !'*' star_target + (a = _tmp_121_rule(p)) // !'*' star_target ) { D(fprintf(stderr, "%*c+ star_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (!'*' star_target)")); @@ -13208,7 +13405,7 @@ del_targets_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_121_rule(p)) // ','.del_target+ + (a = _gather_122_rule(p)) // ','.del_target+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) @@ -13233,8 +13430,8 @@ del_targets_rule(Parser *p) } // del_target: -// | t_primary '.' NAME &del_target_end -// | t_primary '[' slices ']' &del_target_end +// | t_primary '.' NAME !t_lookahead +// | t_primary '[' slices ']' !t_lookahead // | del_t_atom static expr_ty del_target_rule(Parser *p) @@ -13259,12 +13456,12 @@ del_target_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // t_primary '.' NAME &del_target_end + { // t_primary '.' NAME !t_lookahead if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token * _literal; expr_ty a; expr_ty b; @@ -13275,10 +13472,10 @@ del_target_rule(Parser *p) && (b = _PyPegen_name_token(p)) // NAME && - _PyPegen_lookahead(1, del_target_end_rule, p) + _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME &del_target_end")); + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '.' NAME !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -13298,14 +13495,14 @@ del_target_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME &del_target_end")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '.' NAME !t_lookahead")); } - { // t_primary '[' slices ']' &del_target_end + { // t_primary '[' slices ']' !t_lookahead if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + D(fprintf(stderr, "%*c> del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token * _literal; Token * _literal_1; expr_ty a; @@ -13319,10 +13516,10 @@ del_target_rule(Parser *p) && (_literal_1 = _PyPegen_expect_token(p, 10)) // token=']' && - _PyPegen_lookahead(1, del_target_end_rule, p) + _PyPegen_lookahead(0, t_lookahead_rule, p) ) { - D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + D(fprintf(stderr, "%*c+ del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); Token *_token = _PyPegen_get_last_nonnwhitespace_token(p); if (_token == NULL) { D(p->level--); @@ -13342,7 +13539,7 @@ del_target_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' &del_target_end")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "t_primary '[' slices ']' !t_lookahead")); } { // del_t_atom if (p->error_indicator) { @@ -13370,12 +13567,7 @@ del_target_rule(Parser *p) return _res; } -// del_t_atom: -// | NAME &del_target_end -// | '(' del_target ')' -// | '(' del_targets? ')' -// | '[' del_targets? ']' -// | invalid_del_target +// del_t_atom: NAME | '(' del_target ')' | '(' del_targets? ')' | '[' del_targets? 
']' static expr_ty del_t_atom_rule(Parser *p) { @@ -13395,20 +13587,18 @@ del_t_atom_rule(Parser *p) UNUSED(_start_lineno); // Only used by EXTRA macro int _start_col_offset = p->tokens[_mark]->col_offset; UNUSED(_start_col_offset); // Only used by EXTRA macro - { // NAME &del_target_end + { // NAME if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); + D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NAME")); expr_ty a; if ( (a = _PyPegen_name_token(p)) // NAME - && - _PyPegen_lookahead(1, del_target_end_rule, p) ) { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME &del_target_end")); + D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NAME")); _res = _PyPegen_set_expr_context ( p , a , Del ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -13419,7 +13609,7 @@ del_t_atom_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME &del_target_end")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NAME")); } { // '(' del_target ')' if (p->error_indicator) { @@ -13529,137 +13719,6 @@ del_t_atom_rule(Parser *p) D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'[' del_targets? ']'")); } - { // invalid_del_target - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_t_atom[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); - void *invalid_del_target_var; - if ( - (invalid_del_target_var = invalid_del_target_rule(p)) // invalid_del_target - ) - { - D(fprintf(stderr, "%*c+ del_t_atom[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_del_target")); - _res = invalid_del_target_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_t_atom[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "invalid_del_target")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// del_target_end: ')' | ']' | ',' | ';' | NEWLINE -static void * -del_target_end_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // ')' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 8)) // token=')' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "')'")); - } - { // ']' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "']'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 10)) // token=']' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "']'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "']'")); - } - { // ',' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 12)) // token=',' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "','")); - } - { // ';' - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); - Token * _literal; - if ( - (_literal = _PyPegen_expect_token(p, 13)) // token=';' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); - _res = _literal; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); - } - { // NEWLINE - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> del_target_end[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - Token * newline_var; - if ( - (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' - ) - { - D(fprintf(stderr, "%*c+ del_target_end[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); - _res = newline_var; - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s del_target_end[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); - } _res = NULL; done: D(p->level--); @@ -13687,7 +13746,7 @@ targets_rule(Parser *p) UNUSED(_opt_var); // Silence compiler warnings asdl_seq * a; if ( - (a = _gather_123_rule(p)) // ','.target+ + (a = _gather_124_rule(p)) // ','.target+ && (_opt_var = _PyPegen_expect_token(p, 12), 1) // ','? 
) @@ -14399,7 +14458,7 @@ incorrect_arguments_rule(Parser *p) && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_opt_var = _tmp_125_rule(p), 1) // [args | expression for_if_clauses] + (_opt_var = _tmp_126_rule(p), 1) // [args | expression for_if_clauses] ) { D(fprintf(stderr, "%*c+ incorrect_arguments[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses ',' [args | expression for_if_clauses]")); @@ -14682,14 +14741,14 @@ invalid_assignment_rule(Parser *p) D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); Token * _literal; Token * _literal_1; - asdl_seq * _loop0_126_var; + asdl_seq * _loop0_127_var; expr_ty a; if ( (a = star_named_expression_rule(p)) // star_named_expression && (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_loop0_126_var = _loop0_126_rule(p)) // star_named_expressions* + (_loop0_127_var = _loop0_127_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' ) @@ -14725,7 +14784,7 @@ invalid_assignment_rule(Parser *p) && (expression_var = expression_rule(p)) // expression && - (_opt_var = _tmp_127_rule(p), 1) // ['=' annotated_rhs] + (_opt_var = _tmp_128_rule(p), 1) // ['=' annotated_rhs] ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); @@ -14748,10 +14807,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_128_var; + asdl_seq * _loop0_129_var; expr_ty a; if ( - (_loop0_128_var = _loop0_128_rule(p)) // ((star_targets '='))* + (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -14759,7 +14818,7 @@ invalid_assignment_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( _PyPegen_get_invalid_target ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( _PyPegen_get_invalid_target ( a ) ) ); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_TARGET ( a ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -14778,10 +14837,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_129_var; + asdl_seq * _loop0_130_var; expr_ty a; if ( - (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* + (_loop0_130_var = _loop0_130_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -14807,7 +14866,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_130_var; + void *_tmp_131_var; expr_ty a; AugOperator* augassign_var; if ( @@ -14815,7 +14874,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_130_var = _tmp_130_rule(p)) // yield_expr | star_expressions + (_tmp_131_var = _tmp_131_rule(p)) 
// yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -14837,6 +14896,50 @@ invalid_assignment_rule(Parser *p) return _res; } +// invalid_del_stmt: 'del' star_expressions +static void * +invalid_del_stmt_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // 'del' star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_del_stmt[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'del' star_expressions")); + Token * _keyword; + expr_ty a; + if ( + (_keyword = _PyPegen_expect_token(p, 503)) // token='del' + && + (a = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ invalid_del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' star_expressions")); + _res = GET_INVALID_DEL_TARGET ( a ) != NULL ? RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_DEL_TARGET ( a ) , "cannot delete %s" , _PyPegen_get_expr_name ( GET_INVALID_DEL_TARGET ( a ) ) ) : RAISE_SYNTAX_ERROR ( "invalid syntax" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_del_stmt[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'del' star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + // invalid_block: NEWLINE !INDENT static void * invalid_block_rule(Parser *p) @@ -14897,11 +15000,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_131_var; + void *_tmp_132_var; expr_ty a; asdl_seq* for_if_clauses_var; if ( - (_tmp_131_var = _tmp_131_rule(p)) // '[' | '(' | '{' + (_tmp_132_var = _tmp_132_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -14998,13 +15101,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - asdl_seq * _loop0_132_var; - void *_tmp_133_var; + asdl_seq * _loop0_133_var; + void *_tmp_134_var; arg_ty param_no_default_var; if ( - (_loop0_132_var = _loop0_132_rule(p)) // param_no_default* + (_loop0_133_var = _loop0_133_rule(p)) // param_no_default* && - (_tmp_133_var = _tmp_133_rule(p)) // slash_with_default | param_with_default+ + (_tmp_134_var = _tmp_134_rule(p)) // slash_with_default | param_with_default+ && (param_no_default_var = param_no_default_rule(p)) // param_no_default ) @@ -15046,13 +15149,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); - asdl_seq * _loop0_134_var; - void *_tmp_135_var; + asdl_seq * _loop0_135_var; + void *_tmp_136_var; arg_ty lambda_param_no_default_var; if ( - (_loop0_134_var = _loop0_134_rule(p)) // lambda_param_no_default* + (_loop0_135_var = _loop0_135_rule(p)) // lambda_param_no_default* && - (_tmp_135_var = _tmp_135_rule(p)) // 
lambda_slash_with_default | lambda_param_with_default+ + (_tmp_136_var = _tmp_136_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ && (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) @@ -15094,11 +15197,11 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); Token * _literal; - void *_tmp_136_var; + void *_tmp_137_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_136_var = _tmp_136_rule(p)) // ')' | ',' (')' | '**') + (_tmp_137_var = _tmp_137_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -15168,11 +15271,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_137_var; + void *_tmp_138_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_137_var = _tmp_137_rule(p)) // ':' | ',' (':' | '**') + (_tmp_138_var = _tmp_138_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -15247,9 +15350,9 @@ invalid_double_type_comments_rule(Parser *p) return _res; } -// invalid_del_target: star_expression &del_target_end +// invalid_with_item: expression 'as' expression static void * -invalid_del_target_rule(Parser *p) +invalid_with_item_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -15258,21 +15361,25 @@ invalid_del_target_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // star_expression &del_target_end + { // expression 'as' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_del_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); + D(fprintf(stderr, "%*c> invalid_with_item[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression 'as' expression")); + Token * _keyword; expr_ty a; + expr_ty expression_var; if ( - (a = star_expression_rule(p)) // star_expression + (expression_var = expression_rule(p)) // expression + && + (_keyword = _PyPegen_expect_token(p, 520)) // token='as' && - _PyPegen_lookahead(1, del_target_end_rule, p) + (a = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ invalid_del_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression &del_target_end")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "cannot delete %s" , _PyPegen_get_expr_name ( a ) ); + D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_TARGET ( a ) ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -15281,8 +15388,103 @@ invalid_del_target_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_del_target[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression &del_target_end")); + D(fprintf(stderr, "%*c%s invalid_with_item[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression 'as' expression")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_for_target: ASYNC? 'for' star_expressions +static void * +invalid_for_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ASYNC? 'for' star_expressions + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_for_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions")); + Token * _keyword; + void *_opt_var; + UNUSED(_opt_var); // Silence compiler warnings + expr_ty a; + if ( + (_opt_var = _PyPegen_expect_token(p, ASYNC), 1) // ASYNC? + && + (_keyword = _PyPegen_expect_token(p, 517)) // token='for' + && + (a = star_expressions_rule(p)) // star_expressions + ) + { + D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions")); + _res = GET_INVALID_FOR_TARGET ( a ) != NULL ? RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_FOR_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_FOR_TARGET ( a ) ) ) : RAISE_SYNTAX_ERROR ( "invalid syntax" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_for_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "ASYNC? 'for' star_expressions")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// invalid_group: '(' starred_expression ')' +static void * +invalid_group_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // '(' starred_expression ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_group[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' starred_expression ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = starred_expression_rule(p)) // starred_expression + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ invalid_group[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' starred_expression ')'")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "can't use starred expression here" ); + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_group[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' starred_expression ')'")); } _res = NULL; done: @@ -16181,7 +16383,7 @@ _tmp_15_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_15[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'def'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 522)) // token='def' + (_keyword = _PyPegen_expect_token(p, 523)) // token='def' ) { D(fprintf(stderr, "%*c+ _tmp_15[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'def'")); @@ -16255,7 +16457,7 @@ _tmp_16_rule(Parser *p) D(fprintf(stderr, "%*c> _tmp_16[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'class'")); Token * _keyword; if ( - (_keyword = _PyPegen_expect_token(p, 523)) // token='class' + (_keyword = _PyPegen_expect_token(p, 524)) // token='class' ) { D(fprintf(stderr, "%*c+ _tmp_16[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'class'")); @@ -16582,12 +16784,12 @@ _loop1_22_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_138_var; + void *_tmp_139_var; while ( - (_tmp_138_var = _tmp_138_rule(p)) // star_targets '=' + (_tmp_139_var = _tmp_139_rule(p)) // star_targets '=' ) { - _res = _tmp_138_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17008,9 +17210,64 @@ _tmp_29_rule(Parser *p) return _res; } -// _loop0_30: ('.' | '...') +// _tmp_30: ';' | NEWLINE +static void * +_tmp_30_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + void * _res = NULL; + int _mark = p->mark; + { // ';' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "';'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 13)) // token=';' + ) + { + D(fprintf(stderr, "%*c+ _tmp_30[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "';'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_30[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "';'")); + } + { // NEWLINE + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + Token * newline_var; + if ( + (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' + ) + { + D(fprintf(stderr, "%*c+ _tmp_30[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE")); + _res = newline_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_30[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + +// _loop0_31: ('.' | '...') static asdl_seq * -_loop0_30_rule(Parser *p) +_loop0_31_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17034,13 +17291,13 @@ _loop0_30_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_30[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_139_var; + D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_140_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // '.' | '...' + (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' 
) { - _res = _tmp_139_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17056,7 +17313,7 @@ _loop0_30_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_30[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_31[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17069,14 +17326,14 @@ _loop0_30_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_30_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_31_type, _seq); D(p->level--); return _seq; } -// _loop1_31: ('.' | '...') +// _loop1_32: ('.' | '...') static asdl_seq * -_loop1_31_rule(Parser *p) +_loop1_32_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17100,13 +17357,13 @@ _loop1_31_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_140_var; + D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); + void *_tmp_141_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' + (_tmp_141_var = _tmp_141_rule(p)) // '.' | '...' ) { - _res = _tmp_140_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17122,7 +17379,7 @@ _loop1_31_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_31[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_32[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('.' | '...')")); } if (_n == 0 || p->error_indicator) { @@ -17140,14 +17397,14 @@ _loop1_31_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_31_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_32_type, _seq); D(p->level--); return _seq; } -// _loop0_33: ',' import_from_as_name +// _loop0_34: ',' import_from_as_name static asdl_seq * -_loop0_33_rule(Parser *p) +_loop0_34_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17171,7 +17428,7 @@ _loop0_33_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); + D(fprintf(stderr, "%*c> _loop0_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' import_from_as_name")); Token * _literal; alias_ty elem; while ( @@ -17202,7 +17459,7 @@ _loop0_33_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_33[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_34[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' import_from_as_name")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17215,14 +17472,14 @@ _loop0_33_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_33_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_34_type, _seq); D(p->level--); return _seq; } -// _gather_32: import_from_as_name _loop0_33 +// _gather_33: import_from_as_name _loop0_34 static asdl_seq * -_gather_32_rule(Parser *p) +_gather_33_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17231,27 +17488,27 @@ _gather_32_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // import_from_as_name _loop0_33 + { // import_from_as_name _loop0_34 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); + D(fprintf(stderr, "%*c> _gather_33[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_34")); alias_ty elem; asdl_seq * seq; if ( (elem = import_from_as_name_rule(p)) // import_from_as_name && - (seq = _loop0_33_rule(p)) // _loop0_33 + (seq = _loop0_34_rule(p)) // _loop0_34 ) { - D(fprintf(stderr, "%*c+ _gather_32[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_33")); + D(fprintf(stderr, "%*c+ _gather_33[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "import_from_as_name _loop0_34")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_32[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_33")); + D(fprintf(stderr, "%*c%s _gather_33[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "import_from_as_name _loop0_34")); } _res = NULL; done: @@ -17259,9 +17516,9 @@ _gather_32_rule(Parser *p) return _res; } -// _tmp_34: 'as' NAME +// _tmp_35: 'as' NAME static void * -_tmp_34_rule(Parser *p) +_tmp_35_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17275,16 +17532,16 @@ _tmp_34_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_34[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + (_keyword = _PyPegen_expect_token(p, 520)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_34[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -17294,7 +17551,7 @@ _tmp_34_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_34[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_35[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -17303,9 +17560,9 @@ _tmp_34_rule(Parser *p) return _res; } -// _loop0_36: ',' dotted_as_name +// _loop0_37: ',' dotted_as_name static asdl_seq * -_loop0_36_rule(Parser *p) +_loop0_37_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17329,7 +17586,7 @@ _loop0_36_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); + D(fprintf(stderr, "%*c> _loop0_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' dotted_as_name")); Token * _literal; alias_ty elem; while ( @@ -17360,7 +17617,7 @@ _loop0_36_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_36[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_37[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' dotted_as_name")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17373,14 +17630,14 @@ _loop0_36_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_36_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_37_type, _seq); D(p->level--); return _seq; } -// _gather_35: dotted_as_name _loop0_36 +// _gather_36: dotted_as_name _loop0_37 static asdl_seq * -_gather_35_rule(Parser *p) +_gather_36_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17389,27 +17646,27 @@ _gather_35_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // dotted_as_name _loop0_36 + { // dotted_as_name _loop0_37 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_35[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); + D(fprintf(stderr, "%*c> _gather_36[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_37")); alias_ty elem; asdl_seq * seq; if ( (elem = dotted_as_name_rule(p)) // dotted_as_name && - (seq = _loop0_36_rule(p)) // _loop0_36 + (seq = _loop0_37_rule(p)) // _loop0_37 ) { - D(fprintf(stderr, "%*c+ _gather_35[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_36")); + D(fprintf(stderr, "%*c+ _gather_36[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dotted_as_name _loop0_37")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_35[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dotted_as_name _loop0_36")); + D(fprintf(stderr, "%*c%s _gather_36[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dotted_as_name _loop0_37")); } _res = NULL; done: @@ -17417,9 +17674,9 @@ _gather_35_rule(Parser *p) return _res; } -// _tmp_37: 'as' NAME +// _tmp_38: 'as' NAME static void * -_tmp_37_rule(Parser *p) +_tmp_38_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17433,16 +17690,16 @@ _tmp_37_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_37[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + (_keyword = _PyPegen_expect_token(p, 520)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_37[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_38[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -17452,7 +17709,7 @@ _tmp_37_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_37[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_38[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -17461,9 +17718,9 @@ _tmp_37_rule(Parser *p) return _res; } -// _loop0_39: ',' with_item +// _loop0_40: ',' with_item static asdl_seq * -_loop0_39_rule(Parser *p) +_loop0_40_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17487,7 +17744,7 @@ _loop0_39_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -17518,7 +17775,7 @@ _loop0_39_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_39[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_40[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17531,14 +17788,14 @@ _loop0_39_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_39_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_40_type, _seq); D(p->level--); return _seq; } -// _gather_38: with_item _loop0_39 +// _gather_39: with_item _loop0_40 static asdl_seq * -_gather_38_rule(Parser *p) +_gather_39_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17547,27 +17804,27 @@ _gather_38_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_39 + { // with_item _loop0_40 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_38[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); + D(fprintf(stderr, "%*c> _gather_39[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_40")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_39_rule(p)) // _loop0_39 + (seq = _loop0_40_rule(p)) // _loop0_40 ) { - D(fprintf(stderr, "%*c+ _gather_38[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_39")); + D(fprintf(stderr, "%*c+ _gather_39[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_40")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_38[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_39")); + D(fprintf(stderr, "%*c%s _gather_39[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_40")); } _res = NULL; done: @@ -17575,9 +17832,9 @@ _gather_38_rule(Parser *p) return _res; } -// _loop0_41: ',' with_item +// _loop0_42: ',' with_item static asdl_seq * -_loop0_41_rule(Parser *p) +_loop0_42_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17601,7 +17858,7 @@ _loop0_41_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -17632,7 +17889,7 @@ _loop0_41_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_41[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_42[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17645,14 +17902,14 @@ _loop0_41_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_41_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_42_type, _seq); D(p->level--); return _seq; } -// _gather_40: with_item _loop0_41 +// _gather_41: with_item _loop0_42 static asdl_seq * -_gather_40_rule(Parser *p) +_gather_41_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17661,27 +17918,27 @@ _gather_40_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_41 + { // with_item _loop0_42 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_40[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); + D(fprintf(stderr, "%*c> _gather_41[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_42")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_41_rule(p)) // _loop0_41 + (seq = _loop0_42_rule(p)) // _loop0_42 ) { - D(fprintf(stderr, "%*c+ _gather_40[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_41")); + D(fprintf(stderr, "%*c+ _gather_41[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_42")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_40[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_41")); + D(fprintf(stderr, "%*c%s _gather_41[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_42")); } _res = NULL; done: @@ -17689,9 +17946,9 @@ _gather_40_rule(Parser *p) return _res; } -// _loop0_43: ',' with_item +// _loop0_44: ',' with_item static asdl_seq * -_loop0_43_rule(Parser *p) +_loop0_44_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17715,7 +17972,7 @@ _loop0_43_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -17746,7 +18003,7 @@ _loop0_43_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_43[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_44[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17759,14 +18016,14 @@ _loop0_43_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_43_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_44_type, _seq); D(p->level--); return _seq; } -// _gather_42: with_item _loop0_43 +// _gather_43: with_item _loop0_44 static asdl_seq * -_gather_42_rule(Parser *p) +_gather_43_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17775,27 +18032,27 @@ _gather_42_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_43 + { // with_item _loop0_44 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_42[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); + D(fprintf(stderr, "%*c> _gather_43[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_44")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_43_rule(p)) // _loop0_43 + (seq = _loop0_44_rule(p)) // _loop0_44 ) { - D(fprintf(stderr, "%*c+ _gather_42[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_43")); + D(fprintf(stderr, "%*c+ _gather_43[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_44")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_42[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_43")); + D(fprintf(stderr, "%*c%s _gather_43[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_44")); } _res = NULL; done: @@ -17803,9 +18060,9 @@ _gather_42_rule(Parser *p) return _res; } -// _loop0_45: ',' with_item +// _loop0_46: ',' with_item static asdl_seq * -_loop0_45_rule(Parser *p) +_loop0_46_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17829,7 +18086,7 @@ _loop0_45_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); + D(fprintf(stderr, "%*c> _loop0_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' with_item")); Token * _literal; withitem_ty elem; while ( @@ -17860,7 +18117,7 @@ _loop0_45_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_45[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_46[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' with_item")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -17873,14 +18130,14 @@ _loop0_45_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_45_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_46_type, _seq); D(p->level--); return _seq; } -// _gather_44: with_item _loop0_45 +// _gather_45: with_item _loop0_46 static asdl_seq * -_gather_44_rule(Parser *p) +_gather_45_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17889,27 +18146,27 @@ _gather_44_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // with_item _loop0_45 + { // with_item _loop0_46 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_44[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); + D(fprintf(stderr, "%*c> _gather_45[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "with_item _loop0_46")); withitem_ty elem; asdl_seq * seq; if ( (elem = with_item_rule(p)) // with_item && - (seq = _loop0_45_rule(p)) // _loop0_45 + (seq = _loop0_46_rule(p)) // _loop0_46 ) { - D(fprintf(stderr, "%*c+ _gather_44[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_45")); + D(fprintf(stderr, "%*c+ _gather_45[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "with_item _loop0_46")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_44[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_45")); + D(fprintf(stderr, "%*c%s _gather_45[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "with_item _loop0_46")); } _res = NULL; done: @@ -17917,9 +18174,9 @@ _gather_44_rule(Parser *p) return _res; } -// _tmp_46: 'as' target +// _tmp_47: ',' | ')' | ':' static void * -_tmp_46_rule(Parser *p) +_tmp_47_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17928,32 +18185,62 @@ _tmp_46_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // 'as' target + { // ',' if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_46[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' target")); - Token * _keyword; - expr_ty t; + D(fprintf(stderr, "%*c> _tmp_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "','")); + Token * _literal; if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' - && - (t = target_rule(p)) // target + (_literal = _PyPegen_expect_token(p, 12)) // token=',' ) { - D(fprintf(stderr, "%*c+ _tmp_46[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' target")); - _res = t; - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } + D(fprintf(stderr, "%*c+ _tmp_47[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "','")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_47[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "','")); + } + { // ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ _tmp_47[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + _res = _literal; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s _tmp_47[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); + } + { // ':' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> _tmp_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + Token * _literal; + if ( + (_literal = _PyPegen_expect_token(p, 11)) // token=':' + ) + { + D(fprintf(stderr, "%*c+ _tmp_47[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_46[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' target")); + D(fprintf(stderr, "%*c%s _tmp_47[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } _res = NULL; done: @@ -17961,9 +18248,9 @@ _tmp_46_rule(Parser *p) return _res; } -// _loop1_47: except_block +// _loop1_48: except_block static asdl_seq * -_loop1_47_rule(Parser *p) +_loop1_48_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -17987,7 +18274,7 @@ _loop1_47_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_47[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); + D(fprintf(stderr, "%*c> _loop1_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "except_block")); excepthandler_ty except_block_var; while ( (except_block_var = except_block_rule(p)) // except_block @@ -18009,7 +18296,7 @@ _loop1_47_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_47[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_48[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "except_block")); } if (_n == 0 || p->error_indicator) { @@ -18027,14 +18314,14 @@ _loop1_47_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_47_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_48_type, _seq); D(p->level--); return _seq; } -// _tmp_48: 'as' NAME +// _tmp_49: 'as' NAME static void * -_tmp_48_rule(Parser *p) +_tmp_49_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18048,16 +18335,16 @@ _tmp_48_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_48[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'as' NAME")); Token * _keyword; expr_ty z; if ( - (_keyword = _PyPegen_expect_token(p, 530)) // token='as' + (_keyword = _PyPegen_expect_token(p, 520)) // token='as' && (z = _PyPegen_name_token(p)) // NAME ) { - D(fprintf(stderr, "%*c+ _tmp_48[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); + D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'as' NAME")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -18067,7 +18354,7 @@ _tmp_48_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_48[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'as' NAME")); } _res = NULL; @@ -18076,9 +18363,9 @@ _tmp_48_rule(Parser *p) return _res; } -// _tmp_49: 'from' expression +// _tmp_50: 'from' expression static void * -_tmp_49_rule(Parser *p) +_tmp_50_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18092,7 +18379,7 @@ _tmp_49_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_49[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); + D(fprintf(stderr, "%*c> _tmp_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'from' expression")); Token * _keyword; expr_ty z; if ( @@ -18101,7 +18388,7 @@ _tmp_49_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_49[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); + D(fprintf(stderr, "%*c+ _tmp_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'from' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -18111,7 +18398,7 @@ _tmp_49_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_49[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_50[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'from' expression")); } _res = NULL; @@ -18120,9 +18407,9 @@ _tmp_49_rule(Parser *p) return _res; } -// _tmp_50: '->' expression +// _tmp_51: '->' expression static void * -_tmp_50_rule(Parser *p) +_tmp_51_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18136,7 +18423,7 @@ _tmp_50_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_50[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -18145,7 +18432,7 @@ _tmp_50_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_50[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -18155,7 +18442,7 @@ _tmp_50_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_50[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_51[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -18164,9 +18451,9 @@ _tmp_50_rule(Parser *p) return _res; } -// _tmp_51: '->' expression +// _tmp_52: '->' expression static void * -_tmp_51_rule(Parser *p) +_tmp_52_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18180,7 +18467,7 @@ _tmp_51_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_51[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c> _tmp_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'->' expression")); Token * _literal; expr_ty z; if ( @@ -18189,7 +18476,7 @@ _tmp_51_rule(Parser *p) (z = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_51[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); + D(fprintf(stderr, "%*c+ _tmp_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'->' expression")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -18199,7 +18486,7 @@ _tmp_51_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_51[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_52[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'->' expression")); } _res = NULL; @@ -18208,9 +18495,9 @@ _tmp_51_rule(Parser *p) return _res; } -// _tmp_52: NEWLINE INDENT +// _tmp_53: NEWLINE INDENT static void * -_tmp_52_rule(Parser *p) +_tmp_53_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18224,7 +18511,7 @@ _tmp_52_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_52[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c> _tmp_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); Token * indent_var; Token * newline_var; if ( @@ -18233,12 +18520,12 @@ _tmp_52_rule(Parser *p) (indent_var = _PyPegen_expect_token(p, INDENT)) // token='INDENT' ) { - D(fprintf(stderr, "%*c+ _tmp_52[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); + D(fprintf(stderr, "%*c+ _tmp_53[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "NEWLINE INDENT")); _res = _PyPegen_dummy_name(p, newline_var, indent_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_52[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_53[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "NEWLINE INDENT")); } _res = NULL; @@ -18247,9 +18534,9 @@ _tmp_52_rule(Parser *p) return _res; } -// _loop0_53: param_no_default +// _loop0_54: param_no_default static asdl_seq * -_loop0_53_rule(Parser *p) +_loop0_54_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18273,7 +18560,7 @@ _loop0_53_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_53[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18295,7 +18582,7 @@ _loop0_53_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_53[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18308,14 +18595,14 @@ _loop0_53_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_53_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); D(p->level--); return _seq; } -// _loop0_54: param_with_default +// _loop0_55: param_with_default static asdl_seq * -_loop0_54_rule(Parser *p) +_loop0_55_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18339,7 +18626,7 @@ _loop0_54_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_54[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -18361,7 +18648,7 @@ _loop0_54_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_54[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18374,14 +18661,14 @@ _loop0_54_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_54_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); D(p->level--); return _seq; } -// _loop0_55: param_with_default +// _loop0_56: param_with_default static asdl_seq * -_loop0_55_rule(Parser *p) +_loop0_56_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18405,7 +18692,7 @@ _loop0_55_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_55[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -18427,7 +18714,7 @@ _loop0_55_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_55[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_56[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18440,14 +18727,14 @@ _loop0_55_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_55_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_56_type, _seq); D(p->level--); return _seq; } -// _loop1_56: param_no_default +// _loop1_57: param_no_default static asdl_seq * -_loop1_56_rule(Parser *p) +_loop1_57_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18471,7 +18758,7 @@ _loop1_56_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_56[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18493,7 +18780,7 @@ _loop1_56_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_56[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_57[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -18511,14 +18798,14 @@ _loop1_56_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_56_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_57_type, _seq); D(p->level--); return _seq; } -// _loop0_57: param_with_default +// _loop0_58: param_with_default static asdl_seq * -_loop0_57_rule(Parser *p) +_loop0_58_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18542,7 +18829,7 @@ _loop0_57_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_57[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop0_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -18564,7 +18851,7 @@ _loop0_57_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_57[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_58[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18577,14 +18864,14 @@ _loop0_57_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_57_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_58_type, _seq); D(p->level--); return _seq; } -// _loop1_58: param_with_default +// _loop1_59: param_with_default static asdl_seq * -_loop1_58_rule(Parser *p) +_loop1_59_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18608,7 +18895,7 @@ _loop1_58_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_58[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -18630,7 +18917,7 @@ _loop1_58_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_58[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -18648,14 +18935,14 @@ _loop1_58_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_58_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); D(p->level--); return _seq; } -// _loop1_59: param_no_default +// _loop1_60: param_no_default static asdl_seq * -_loop1_59_rule(Parser *p) +_loop1_60_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18679,7 +18966,7 @@ _loop1_59_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_59[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18701,7 +18988,7 @@ _loop1_59_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_59[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -18719,14 +19006,14 @@ _loop1_59_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_59_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); D(p->level--); return _seq; } -// _loop1_60: param_no_default +// _loop1_61: param_no_default static asdl_seq * -_loop1_60_rule(Parser *p) +_loop1_61_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18750,7 +19037,7 @@ _loop1_60_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_60[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop1_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18772,7 +19059,7 @@ _loop1_60_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_60[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_61[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -18790,14 +19077,14 @@ _loop1_60_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_60_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_61_type, _seq); D(p->level--); return _seq; } -// _loop0_61: param_no_default +// _loop0_62: param_no_default static asdl_seq * -_loop0_61_rule(Parser *p) +_loop0_62_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18821,7 +19108,7 @@ _loop0_61_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_61[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18843,7 +19130,7 @@ _loop0_61_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_61[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_62[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18856,14 +19143,14 @@ _loop0_61_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_61_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_62_type, _seq); D(p->level--); return _seq; } -// _loop1_62: param_with_default +// _loop1_63: param_with_default static asdl_seq * -_loop1_62_rule(Parser *p) +_loop1_63_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18887,7 +19174,7 @@ _loop1_62_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_62[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -18909,7 +19196,7 @@ _loop1_62_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_62[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_63[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -18927,14 +19214,14 @@ _loop1_62_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_62_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_63_type, _seq); D(p->level--); return _seq; } -// _loop0_63: param_no_default +// _loop0_64: param_no_default static asdl_seq * -_loop0_63_rule(Parser *p) +_loop0_64_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -18958,7 +19245,7 @@ _loop0_63_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_63[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -18980,7 +19267,7 @@ _loop0_63_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_63[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_64[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -18993,14 +19280,14 @@ _loop0_63_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_63_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_64_type, _seq); D(p->level--); return _seq; } -// _loop1_64: param_with_default +// _loop1_65: param_with_default static asdl_seq * -_loop1_64_rule(Parser *p) +_loop1_65_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19024,7 +19311,7 @@ _loop1_64_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_64[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -19046,7 +19333,7 @@ _loop1_64_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_64[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_65[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -19064,14 +19351,14 @@ _loop1_64_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_64_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_65_type, _seq); D(p->level--); return _seq; } -// _loop0_65: param_maybe_default +// _loop0_66: param_maybe_default static asdl_seq * -_loop0_65_rule(Parser *p) +_loop0_66_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19095,7 +19382,7 @@ _loop0_65_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_65[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -19117,7 +19404,7 @@ _loop0_65_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_65[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_66[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_maybe_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19130,14 +19417,14 @@ _loop0_65_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_65_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_66_type, _seq); D(p->level--); return _seq; } -// _loop1_66: param_maybe_default +// _loop1_67: param_maybe_default static asdl_seq * -_loop1_66_rule(Parser *p) +_loop1_67_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19161,7 +19448,7 @@ _loop1_66_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_66[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_maybe_default")); NameDefaultPair* param_maybe_default_var; while ( (param_maybe_default_var = param_maybe_default_rule(p)) // param_maybe_default @@ -19183,7 +19470,7 @@ _loop1_66_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_66[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_67[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -19201,14 +19488,14 @@ _loop1_66_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_66_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); D(p->level--); return _seq; } -// _loop1_67: ('@' named_expression NEWLINE) +// _loop1_68: ('@' named_expression NEWLINE) static asdl_seq * -_loop1_67_rule(Parser *p) +_loop1_68_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19232,13 +19519,13 @@ _loop1_67_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_67[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_141_var; + D(fprintf(stderr, "%*c> _loop1_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); + void *_tmp_142_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // '@' named_expression NEWLINE + (_tmp_142_var = _tmp_142_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_141_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19254,7 +19541,7 @@ _loop1_67_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_67[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_68[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('@' named_expression NEWLINE)")); } if (_n == 0 || p->error_indicator) { @@ -19272,14 +19559,14 @@ _loop1_67_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_67_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_68_type, _seq); D(p->level--); return _seq; } -// _tmp_68: '(' arguments? ')' +// _tmp_69: '(' arguments? ')' static void * -_tmp_68_rule(Parser *p) +_tmp_69_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19293,7 +19580,7 @@ _tmp_68_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c> _tmp_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); Token * _literal; Token * _literal_1; void *z; @@ -19305,7 +19592,7 @@ _tmp_68_rule(Parser *p) (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_68[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); + D(fprintf(stderr, "%*c+ _tmp_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' arguments? ')'")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -19315,7 +19602,7 @@ _tmp_68_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_68[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_69[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'(' arguments? 
')'")); } _res = NULL; @@ -19324,9 +19611,9 @@ _tmp_68_rule(Parser *p) return _res; } -// _loop0_70: ',' star_expression +// _loop0_71: ',' star_expression static asdl_seq * -_loop0_70_rule(Parser *p) +_loop0_71_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19350,7 +19637,7 @@ _loop0_70_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _loop0_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty elem; while ( @@ -19381,7 +19668,7 @@ _loop0_70_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_70[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_71[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' star_expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19394,14 +19681,14 @@ _loop0_70_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_70_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_71_type, _seq); D(p->level--); return _seq; } -// _gather_69: star_expression _loop0_70 +// _gather_70: star_expression _loop0_71 static asdl_seq * -_gather_69_rule(Parser *p) +_gather_70_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19410,27 +19697,27 @@ _gather_69_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_expression _loop0_70 + { // star_expression _loop0_71 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_69[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); + D(fprintf(stderr, "%*c> _gather_70[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_71")); expr_ty elem; asdl_seq * seq; if ( (elem = star_expression_rule(p)) // star_expression && - (seq = _loop0_70_rule(p)) // _loop0_70 + (seq = _loop0_71_rule(p)) // _loop0_71 ) { - D(fprintf(stderr, "%*c+ _gather_69[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_70")); + D(fprintf(stderr, "%*c+ _gather_70[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expression _loop0_71")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_69[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expression _loop0_70")); + D(fprintf(stderr, "%*c%s _gather_70[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_expression _loop0_71")); } _res = NULL; done: @@ -19438,9 +19725,9 @@ _gather_69_rule(Parser *p) return _res; } -// _loop1_71: (',' star_expression) +// _loop1_72: (',' star_expression) static asdl_seq * -_loop1_71_rule(Parser *p) +_loop1_72_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19464,13 +19751,13 @@ _loop1_71_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_71[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_142_var; + D(fprintf(stderr, "%*c> _loop1_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); + void *_tmp_143_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // ',' star_expression + (_tmp_143_var = _tmp_143_rule(p)) // ',' star_expression ) { - _res = _tmp_142_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19486,7 +19773,7 @@ _loop1_71_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_71[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_72[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_expression)")); } if (_n == 0 || p->error_indicator) { @@ -19504,14 +19791,14 @@ _loop1_71_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_71_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_72_type, _seq); D(p->level--); return _seq; } -// _loop0_73: ',' star_named_expression +// _loop0_74: ',' star_named_expression static asdl_seq * -_loop0_73_rule(Parser *p) +_loop0_74_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19535,7 +19822,7 @@ _loop0_73_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); + D(fprintf(stderr, "%*c> _loop0_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_named_expression")); Token * _literal; expr_ty elem; while ( @@ -19566,7 +19853,7 @@ _loop0_73_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_73[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_74[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_named_expression")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19579,14 +19866,14 @@ _loop0_73_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_73_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_74_type, _seq); D(p->level--); return _seq; } -// _gather_72: star_named_expression _loop0_73 +// _gather_73: star_named_expression _loop0_74 static asdl_seq * -_gather_72_rule(Parser *p) +_gather_73_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19595,27 +19882,27 @@ _gather_72_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_named_expression _loop0_73 + { // star_named_expression _loop0_74 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); + D(fprintf(stderr, "%*c> _gather_73[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_74")); expr_ty elem; asdl_seq * seq; if ( (elem = star_named_expression_rule(p)) // star_named_expression && - (seq = _loop0_73_rule(p)) // _loop0_73 + (seq = _loop0_74_rule(p)) // _loop0_74 ) { - D(fprintf(stderr, "%*c+ _gather_72[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_73")); + D(fprintf(stderr, "%*c+ _gather_73[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression _loop0_74")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_72[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_73")); + D(fprintf(stderr, "%*c%s _gather_73[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression _loop0_74")); } _res = NULL; done: @@ -19623,9 +19910,9 @@ _gather_72_rule(Parser *p) return _res; } -// _loop1_74: (',' expression) +// _loop1_75: (',' expression) static asdl_seq * -_loop1_74_rule(Parser *p) +_loop1_75_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19649,13 +19936,13 @@ _loop1_74_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_74[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_143_var; + D(fprintf(stderr, "%*c> _loop1_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); + void *_tmp_144_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // ',' expression + (_tmp_144_var = _tmp_144_rule(p)) // ',' expression ) { - _res = _tmp_143_var; + _res = _tmp_144_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19671,7 +19958,7 @@ _loop1_74_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_74[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_75[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(',' expression)")); } if (_n == 0 || p->error_indicator) { @@ -19689,14 +19976,14 @@ _loop1_74_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_74_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_75_type, _seq); D(p->level--); return _seq; } -// _loop0_75: lambda_param_no_default +// _loop0_76: lambda_param_no_default static asdl_seq * -_loop0_75_rule(Parser *p) +_loop0_76_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19720,7 +20007,7 @@ _loop0_75_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -19742,7 +20029,7 @@ _loop0_75_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_75[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19755,14 +20042,14 @@ _loop0_75_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_75_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); D(p->level--); return _seq; } -// _loop0_76: lambda_param_with_default +// _loop0_77: lambda_param_with_default static asdl_seq * -_loop0_76_rule(Parser *p) +_loop0_77_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19786,7 +20073,7 @@ _loop0_76_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_76[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -19808,7 +20095,7 @@ _loop0_76_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_76[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19821,14 +20108,14 @@ _loop0_76_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_76_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); D(p->level--); return _seq; } -// _loop0_77: lambda_param_with_default +// _loop0_78: lambda_param_with_default static asdl_seq * -_loop0_77_rule(Parser *p) +_loop0_78_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19852,7 +20139,7 @@ _loop0_77_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_77[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop0_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -19874,7 +20161,7 @@ _loop0_77_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_77[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_78[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -19887,14 +20174,14 @@ _loop0_77_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_77_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_78_type, _seq); D(p->level--); return _seq; } -// _loop1_78: lambda_param_no_default +// _loop1_79: lambda_param_no_default static asdl_seq * -_loop1_78_rule(Parser *p) +_loop1_79_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19918,7 +20205,7 @@ _loop1_78_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_78[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop1_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -19940,7 +20227,7 @@ _loop1_78_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_78[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_79[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -19958,14 +20245,14 @@ _loop1_78_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_78_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_79_type, _seq); D(p->level--); return _seq; } -// _loop0_79: lambda_param_with_default +// _loop0_80: lambda_param_with_default static asdl_seq * -_loop0_79_rule(Parser *p) +_loop0_80_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -19989,7 +20276,7 @@ _loop0_79_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_79[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop0_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -20011,7 +20298,7 @@ _loop0_79_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_79[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_80[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -20024,14 +20311,14 @@ _loop0_79_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_79_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_80_type, _seq); D(p->level--); return _seq; } -// _loop1_80: lambda_param_with_default +// _loop1_81: lambda_param_with_default static asdl_seq * -_loop1_80_rule(Parser *p) +_loop1_81_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20055,7 +20342,7 @@ _loop1_80_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_80[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -20077,7 +20364,7 @@ _loop1_80_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_80[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -20095,14 +20382,14 @@ _loop1_80_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_80_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); D(p->level--); return _seq; } -// _loop1_81: lambda_param_no_default +// _loop1_82: lambda_param_no_default static asdl_seq * -_loop1_81_rule(Parser *p) +_loop1_82_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20126,7 +20413,7 @@ _loop1_81_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_81[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -20148,7 +20435,7 @@ _loop1_81_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_81[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -20166,14 +20453,14 @@ _loop1_81_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_81_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); D(p->level--); return _seq; } -// _loop1_82: lambda_param_no_default +// _loop1_83: lambda_param_no_default static asdl_seq * -_loop1_82_rule(Parser *p) +_loop1_83_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20197,7 +20484,7 @@ _loop1_82_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_82[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop1_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -20219,7 +20506,7 @@ _loop1_82_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_82[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_83[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } if (_n == 0 || p->error_indicator) { @@ -20237,14 +20524,14 @@ _loop1_82_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_82_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_83_type, _seq); D(p->level--); return _seq; } -// _loop0_83: lambda_param_no_default +// _loop0_84: lambda_param_no_default static asdl_seq * -_loop0_83_rule(Parser *p) +_loop0_84_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20268,7 +20555,7 @@ _loop0_83_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_83[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -20290,7 +20577,7 @@ _loop0_83_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_83[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_84[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -20303,14 +20590,14 @@ _loop0_83_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_83_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_84_type, _seq); D(p->level--); return _seq; } -// _loop1_84: lambda_param_with_default +// _loop1_85: lambda_param_with_default static asdl_seq * -_loop1_84_rule(Parser *p) +_loop1_85_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20334,7 +20621,7 @@ _loop1_84_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_84[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -20356,7 +20643,7 @@ _loop1_84_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_84[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_85[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -20374,14 +20661,14 @@ _loop1_84_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_84_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_85_type, _seq); D(p->level--); return _seq; } -// _loop0_85: lambda_param_no_default +// _loop0_86: lambda_param_no_default static asdl_seq * -_loop0_85_rule(Parser *p) +_loop0_86_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20405,7 +20692,7 @@ _loop0_85_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_85[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -20427,7 +20714,7 @@ _loop0_85_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_85[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_86[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -20440,14 +20727,14 @@ _loop0_85_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_85_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_86_type, _seq); D(p->level--); return _seq; } -// _loop1_86: lambda_param_with_default +// _loop1_87: lambda_param_with_default static asdl_seq * -_loop1_86_rule(Parser *p) +_loop1_87_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20471,7 +20758,7 @@ _loop1_86_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_86[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -20493,7 +20780,7 @@ _loop1_86_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_86[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_87[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -20511,14 +20798,14 @@ _loop1_86_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_86_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_87_type, _seq); D(p->level--); return _seq; } -// _loop0_87: lambda_param_maybe_default +// _loop0_88: lambda_param_maybe_default static asdl_seq * -_loop0_87_rule(Parser *p) +_loop0_88_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20542,7 +20829,7 @@ _loop0_87_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_87[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop0_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -20564,7 +20851,7 @@ _loop0_87_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_87[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_88[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_maybe_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -20577,14 +20864,14 @@ _loop0_87_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_87_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_88_type, _seq); D(p->level--); return _seq; } -// _loop1_88: lambda_param_maybe_default +// _loop1_89: lambda_param_maybe_default static asdl_seq * -_loop1_88_rule(Parser *p) +_loop1_89_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20608,7 +20895,7 @@ _loop1_88_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_88[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); + D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_maybe_default")); NameDefaultPair* lambda_param_maybe_default_var; while ( (lambda_param_maybe_default_var = lambda_param_maybe_default_rule(p)) // lambda_param_maybe_default @@ -20630,7 +20917,7 @@ _loop1_88_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_88[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_89[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_maybe_default")); } if (_n == 0 || p->error_indicator) { @@ -20648,14 +20935,14 @@ _loop1_88_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_88_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); D(p->level--); return _seq; } -// _loop1_89: ('or' conjunction) +// _loop1_90: ('or' conjunction) static asdl_seq * -_loop1_89_rule(Parser *p) +_loop1_90_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20679,13 +20966,13 @@ _loop1_89_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_89[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_144_var; + D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); + void *_tmp_145_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // 'or' conjunction + (_tmp_145_var = _tmp_145_rule(p)) // 'or' conjunction ) { - _res = _tmp_144_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20701,7 +20988,7 @@ _loop1_89_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_89[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_90[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('or' conjunction)")); } if (_n == 0 || p->error_indicator) { @@ -20719,14 +21006,14 @@ _loop1_89_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_89_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); D(p->level--); return _seq; } -// _loop1_90: ('and' inversion) +// _loop1_91: ('and' inversion) static asdl_seq * -_loop1_90_rule(Parser *p) +_loop1_91_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20750,13 +21037,13 @@ _loop1_90_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_145_var; + D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); + void *_tmp_146_var; while ( - (_tmp_145_var = _tmp_145_rule(p)) // 'and' inversion + (_tmp_146_var = _tmp_146_rule(p)) // 'and' inversion ) { - _res = _tmp_145_var; + _res = _tmp_146_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20772,7 +21059,7 @@ _loop1_90_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_90[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_91[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('and' inversion)")); } if (_n == 0 || p->error_indicator) { @@ -20790,14 +21077,14 @@ _loop1_90_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_90_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); D(p->level--); return _seq; } -// _loop1_91: compare_op_bitwise_or_pair +// _loop1_92: compare_op_bitwise_or_pair static asdl_seq * -_loop1_91_rule(Parser *p) +_loop1_92_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20821,7 +21108,7 @@ _loop1_91_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); + D(fprintf(stderr, "%*c> _loop1_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "compare_op_bitwise_or_pair")); CmpopExprPair* compare_op_bitwise_or_pair_var; while ( (compare_op_bitwise_or_pair_var = compare_op_bitwise_or_pair_rule(p)) // compare_op_bitwise_or_pair @@ -20843,7 +21130,7 @@ _loop1_91_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_91[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_92[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "compare_op_bitwise_or_pair")); } if (_n == 0 || p->error_indicator) { @@ -20861,14 +21148,14 @@ _loop1_91_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_91_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_92_type, _seq); D(p->level--); return _seq; } -// _tmp_92: '!=' +// _tmp_93: '!=' static void * -_tmp_92_rule(Parser *p) +_tmp_93_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20882,13 +21169,13 @@ _tmp_92_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_92[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); + D(fprintf(stderr, "%*c> _tmp_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'!='")); Token * tok; if ( (tok = _PyPegen_expect_token(p, 28)) // token='!=' ) { - D(fprintf(stderr, "%*c+ _tmp_92[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); + D(fprintf(stderr, "%*c+ _tmp_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'!='")); _res = _PyPegen_check_barry_as_flufl ( p ) ? NULL : tok; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -20898,7 +21185,7 @@ _tmp_92_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_92[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_93[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'!='")); } _res = NULL; @@ -20907,9 +21194,9 @@ _tmp_92_rule(Parser *p) return _res; } -// _loop0_94: ',' slice +// _loop0_95: ',' slice static asdl_seq * -_loop0_94_rule(Parser *p) +_loop0_95_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20933,7 +21220,7 @@ _loop0_94_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' slice")); + D(fprintf(stderr, "%*c> _loop0_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' slice")); Token * _literal; expr_ty elem; while ( @@ -20964,7 +21251,7 @@ _loop0_94_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_94[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_95[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' slice")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -20977,14 +21264,14 @@ _loop0_94_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_94_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_95_type, _seq); D(p->level--); return _seq; } -// _gather_93: slice _loop0_94 +// _gather_94: slice _loop0_95 static asdl_seq * -_gather_93_rule(Parser *p) +_gather_94_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -20993,27 +21280,27 @@ _gather_93_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // slice _loop0_94 + { // slice _loop0_95 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_93[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); + D(fprintf(stderr, "%*c> _gather_94[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slice _loop0_95")); expr_ty elem; asdl_seq * seq; if ( (elem = slice_rule(p)) // slice && - (seq = _loop0_94_rule(p)) // _loop0_94 + (seq = _loop0_95_rule(p)) // _loop0_95 ) { - D(fprintf(stderr, "%*c+ _gather_93[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice _loop0_94")); + D(fprintf(stderr, "%*c+ _gather_94[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slice _loop0_95")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_93[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice _loop0_94")); + D(fprintf(stderr, "%*c%s _gather_94[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "slice _loop0_95")); } _res = NULL; done: @@ -21021,9 +21308,9 @@ _gather_93_rule(Parser *p) return _res; } -// _tmp_95: ':' expression? +// _tmp_96: ':' expression? static void * -_tmp_95_rule(Parser *p) +_tmp_96_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21037,7 +21324,7 @@ _tmp_95_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_95[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); + D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':' expression?")); Token * _literal; void *d; if ( @@ -21046,7 +21333,7 @@ _tmp_95_rule(Parser *p) (d = expression_rule(p), 1) // expression? 
) { - D(fprintf(stderr, "%*c+ _tmp_95[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); + D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':' expression?")); _res = d; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -21056,7 +21343,7 @@ _tmp_95_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_95[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':' expression?")); } _res = NULL; @@ -21065,9 +21352,9 @@ _tmp_95_rule(Parser *p) return _res; } -// _tmp_96: tuple | group | genexp +// _tmp_97: tuple | group | genexp static void * -_tmp_96_rule(Parser *p) +_tmp_97_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21081,18 +21368,18 @@ _tmp_96_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); expr_ty tuple_var; if ( (tuple_var = tuple_rule(p)) // tuple ) { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); _res = tuple_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); } { // group @@ -21100,18 +21387,18 @@ _tmp_96_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "group")); expr_ty group_var; if ( (group_var = group_rule(p)) // group ) { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "group")); _res = group_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "group")); } { // genexp @@ -21119,18 +21406,18 @@ _tmp_96_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_96[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "genexp")); expr_ty genexp_var; if ( (genexp_var = genexp_rule(p)) // genexp ) { - D(fprintf(stderr, "%*c+ _tmp_96[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); + D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "genexp")); _res = genexp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_96[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "genexp")); } _res = NULL; @@ -21139,9 +21426,9 @@ _tmp_96_rule(Parser *p) return _res; } -// _tmp_97: list | listcomp +// _tmp_98: list | listcomp static void * -_tmp_97_rule(Parser *p) +_tmp_98_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21155,18 +21442,18 @@ _tmp_97_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); expr_ty list_var; if ( (list_var = list_rule(p)) // list ) { - D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); _res = list_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); } { // listcomp @@ -21174,18 +21461,18 @@ _tmp_97_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_97[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); + D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "listcomp")); expr_ty listcomp_var; if ( (listcomp_var = listcomp_rule(p)) // listcomp ) { - D(fprintf(stderr, "%*c+ _tmp_97[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); + D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "listcomp")); _res = listcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_97[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "listcomp")); } _res = NULL; @@ -21194,9 +21481,9 @@ _tmp_97_rule(Parser *p) return _res; } -// _tmp_98: dict | set | dictcomp | setcomp +// _tmp_99: dict | set | dictcomp | setcomp static void * -_tmp_98_rule(Parser *p) +_tmp_99_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21210,18 +21497,18 @@ _tmp_98_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); + D(fprintf(stderr, "%*c> _tmp_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dict")); expr_ty dict_var; if ( (dict_var = dict_rule(p)) // dict ) { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); + D(fprintf(stderr, "%*c+ _tmp_99[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dict")); _res = dict_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_99[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "dict")); } { // set @@ -21229,18 +21516,18 @@ _tmp_98_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); + D(fprintf(stderr, "%*c> _tmp_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "set")); expr_ty set_var; if ( (set_var = set_rule(p)) // set ) { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); + D(fprintf(stderr, "%*c+ _tmp_99[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "set")); _res = set_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_99[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "set")); } { // dictcomp @@ -21248,18 +21535,18 @@ _tmp_98_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); + D(fprintf(stderr, "%*c> _tmp_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "dictcomp")); expr_ty dictcomp_var; if ( (dictcomp_var = dictcomp_rule(p)) // dictcomp ) { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); + D(fprintf(stderr, "%*c+ _tmp_99[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "dictcomp")); _res = dictcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_99[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "dictcomp")); } { // setcomp @@ -21267,18 +21554,18 @@ _tmp_98_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_98[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); + D(fprintf(stderr, "%*c> _tmp_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "setcomp")); expr_ty setcomp_var; if ( (setcomp_var = setcomp_rule(p)) // setcomp ) { - D(fprintf(stderr, "%*c+ _tmp_98[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); + D(fprintf(stderr, "%*c+ _tmp_99[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "setcomp")); _res = setcomp_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_98[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_99[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "setcomp")); } _res = NULL; @@ -21287,9 +21574,9 @@ _tmp_98_rule(Parser *p) return _res; } -// _loop1_99: STRING +// _loop1_100: STRING static asdl_seq * -_loop1_99_rule(Parser *p) +_loop1_100_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21313,7 +21600,7 @@ _loop1_99_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_99[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); + D(fprintf(stderr, "%*c> _loop1_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "STRING")); expr_ty string_var; while ( (string_var = _PyPegen_string_token(p)) // STRING @@ -21335,7 +21622,7 @@ _loop1_99_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_99[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_100[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "STRING")); } if (_n == 0 || p->error_indicator) { @@ -21353,14 +21640,14 @@ _loop1_99_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_99_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_100_type, _seq); D(p->level--); return _seq; } -// _tmp_100: star_named_expression ',' star_named_expressions? +// _tmp_101: star_named_expression ',' star_named_expressions? static void * -_tmp_100_rule(Parser *p) +_tmp_101_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21374,7 +21661,7 @@ _tmp_100_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_100[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); Token * _literal; expr_ty y; void *z; @@ -21386,7 +21673,7 @@ _tmp_100_rule(Parser *p) (z = star_named_expressions_rule(p), 1) // star_named_expressions? ) { - D(fprintf(stderr, "%*c+ _tmp_100[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); + D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions?")); _res = _PyPegen_seq_insert_in_front ( p , y , z ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -21396,7 +21683,7 @@ _tmp_100_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_100[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions?")); } _res = NULL; @@ -21405,9 +21692,9 @@ _tmp_100_rule(Parser *p) return _res; } -// _tmp_101: yield_expr | named_expression +// _tmp_102: yield_expr | named_expression static void * -_tmp_101_rule(Parser *p) +_tmp_102_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21421,18 +21708,18 @@ _tmp_101_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_102[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // named_expression @@ -21440,18 +21727,18 @@ _tmp_101_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_101[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); + D(fprintf(stderr, "%*c> _tmp_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "named_expression")); expr_ty named_expression_var; if ( (named_expression_var = named_expression_rule(p)) // named_expression ) { - D(fprintf(stderr, "%*c+ _tmp_101[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); + D(fprintf(stderr, "%*c+ _tmp_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "named_expression")); _res = named_expression_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_101[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_102[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "named_expression")); } _res = NULL; @@ -21460,9 +21747,9 @@ _tmp_101_rule(Parser *p) return _res; } -// _loop0_103: ',' double_starred_kvpair +// _loop0_104: ',' double_starred_kvpair static asdl_seq * -_loop0_103_rule(Parser *p) +_loop0_104_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21486,7 +21773,7 @@ _loop0_103_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); + D(fprintf(stderr, "%*c> _loop0_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' double_starred_kvpair")); Token * _literal; KeyValuePair* elem; while ( @@ -21517,7 +21804,7 @@ _loop0_103_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_103[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_104[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' double_starred_kvpair")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -21530,14 +21817,14 @@ _loop0_103_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_103_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_104_type, _seq); D(p->level--); return _seq; } -// _gather_102: double_starred_kvpair _loop0_103 +// _gather_103: double_starred_kvpair _loop0_104 static asdl_seq * -_gather_102_rule(Parser *p) +_gather_103_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21546,27 +21833,27 @@ _gather_102_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // double_starred_kvpair _loop0_103 + { // double_starred_kvpair _loop0_104 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_102[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); + D(fprintf(stderr, "%*c> _gather_103[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_104")); KeyValuePair* elem; asdl_seq * seq; if ( (elem = double_starred_kvpair_rule(p)) // double_starred_kvpair && - (seq = _loop0_103_rule(p)) // _loop0_103 + (seq = _loop0_104_rule(p)) // _loop0_104 ) { - D(fprintf(stderr, "%*c+ _gather_102[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_103")); + D(fprintf(stderr, "%*c+ _gather_103[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "double_starred_kvpair _loop0_104")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_102[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_103")); + D(fprintf(stderr, "%*c%s _gather_103[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "double_starred_kvpair _loop0_104")); } _res = NULL; done: @@ -21574,9 +21861,9 @@ _gather_102_rule(Parser *p) return _res; } -// _loop1_104: for_if_clause +// _loop1_105: for_if_clause static asdl_seq * -_loop1_104_rule(Parser *p) +_loop1_105_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21600,7 +21887,7 @@ _loop1_104_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_104[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); + D(fprintf(stderr, "%*c> _loop1_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "for_if_clause")); comprehension_ty for_if_clause_var; while ( (for_if_clause_var = for_if_clause_rule(p)) // for_if_clause @@ -21622,7 +21909,7 @@ _loop1_104_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_104[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_105[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "for_if_clause")); } if (_n == 0 || p->error_indicator) { @@ -21640,14 +21927,14 @@ _loop1_104_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_104_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_105_type, _seq); D(p->level--); return _seq; } -// _loop0_105: ('if' disjunction) +// _loop0_106: ('if' disjunction) static asdl_seq * -_loop0_105_rule(Parser *p) +_loop0_106_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21671,13 +21958,13 @@ _loop0_105_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_105[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_146_var; + D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_147_var; while ( - (_tmp_146_var = _tmp_146_rule(p)) // 'if' disjunction + (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction ) { - _res = _tmp_146_var; + _res = _tmp_147_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21693,7 +21980,7 @@ _loop0_105_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_105[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "('if' disjunction)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -21706,14 +21993,14 @@ _loop0_105_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_105_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); D(p->level--); return _seq; } -// _loop0_106: ('if' disjunction) +// _loop0_107: ('if' disjunction) static asdl_seq * -_loop0_106_rule(Parser *p) +_loop0_107_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21737,13 +22024,13 @@ _loop0_106_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_147_var; + D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); + void *_tmp_148_var; while ( - (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction + (_tmp_148_var = _tmp_148_rule(p)) // 'if' disjunction ) { - _res = _tmp_147_var; + _res = _tmp_148_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21759,7 +22046,7 @@ _loop0_106_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_106[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_107[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "('if' disjunction)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -21772,14 +22059,14 @@ _loop0_106_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_106_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_107_type, _seq); D(p->level--); return _seq; } -// _tmp_107: ',' args +// _tmp_108: ',' args static void * -_tmp_107_rule(Parser *p) +_tmp_108_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21793,7 +22080,7 @@ _tmp_107_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); + D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); Token * _literal; expr_ty c; if ( @@ -21802,7 +22089,7 @@ _tmp_107_rule(Parser *p) (c = args_rule(p)) // args ) { - D(fprintf(stderr, "%*c+ _tmp_107[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); + D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -21812,7 +22099,7 @@ _tmp_107_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_107[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' args")); } _res = NULL; @@ -21821,9 +22108,9 @@ _tmp_107_rule(Parser *p) return _res; } -// _tmp_108: ',' args +// _tmp_109: ',' args static void * -_tmp_108_rule(Parser *p) +_tmp_109_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21837,7 +22124,7 @@ _tmp_108_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_108[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); + D(fprintf(stderr, "%*c> _tmp_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' args")); Token * _literal; expr_ty c; if ( @@ -21846,7 +22133,7 @@ _tmp_108_rule(Parser *p) (c = args_rule(p)) // args ) { - D(fprintf(stderr, "%*c+ _tmp_108[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); + D(fprintf(stderr, "%*c+ _tmp_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' args")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -21856,7 +22143,7 @@ _tmp_108_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_108[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_109[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' args")); } _res = NULL; @@ -21865,9 +22152,9 @@ _tmp_108_rule(Parser *p) return _res; } -// _loop0_110: ',' kwarg_or_starred +// _loop0_111: ',' kwarg_or_starred static asdl_seq * -_loop0_110_rule(Parser *p) +_loop0_111_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21891,7 +22178,7 @@ _loop0_110_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + D(fprintf(stderr, "%*c> _loop0_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -21922,7 +22209,7 @@ _loop0_110_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_110[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_111[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -21935,14 +22222,14 @@ _loop0_110_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_110_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_111_type, _seq); D(p->level--); return _seq; } -// _gather_109: kwarg_or_starred _loop0_110 +// _gather_110: kwarg_or_starred _loop0_111 static asdl_seq * -_gather_109_rule(Parser *p) +_gather_110_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -21951,27 +22238,27 @@ _gather_109_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_starred _loop0_110 + { // kwarg_or_starred _loop0_111 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_109[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); + D(fprintf(stderr, "%*c> _gather_110[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_111")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_110_rule(p)) // _loop0_110 + (seq = _loop0_111_rule(p)) // _loop0_111 ) { - D(fprintf(stderr, "%*c+ _gather_109[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_110")); + D(fprintf(stderr, "%*c+ _gather_110[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_111")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_109[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_110")); + D(fprintf(stderr, "%*c%s _gather_110[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_111")); } _res = NULL; done: @@ -21979,9 +22266,9 @@ _gather_109_rule(Parser *p) return _res; } -// _loop0_112: ',' kwarg_or_double_starred +// _loop0_113: ',' kwarg_or_double_starred static asdl_seq * -_loop0_112_rule(Parser *p) +_loop0_113_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22005,7 +22292,7 @@ _loop0_112_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c> _loop0_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -22036,7 +22323,7 @@ _loop0_112_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_112[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_113[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22049,14 +22336,14 @@ _loop0_112_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_112_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_113_type, _seq); D(p->level--); return _seq; } -// _gather_111: kwarg_or_double_starred _loop0_112 +// _gather_112: kwarg_or_double_starred _loop0_113 static asdl_seq * -_gather_111_rule(Parser *p) +_gather_112_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22065,27 +22352,27 @@ _gather_111_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_double_starred _loop0_112 + { // kwarg_or_double_starred _loop0_113 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_111[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + D(fprintf(stderr, "%*c> _gather_112[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_113")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_112_rule(p)) // _loop0_112 + (seq = _loop0_113_rule(p)) // _loop0_113 ) { - D(fprintf(stderr, "%*c+ _gather_111[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + D(fprintf(stderr, "%*c+ _gather_112[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_113")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_111[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_112")); + D(fprintf(stderr, "%*c%s _gather_112[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_113")); } _res = NULL; done: @@ -22093,9 +22380,9 @@ _gather_111_rule(Parser *p) return _res; } -// _loop0_114: ',' kwarg_or_starred +// _loop0_115: ',' kwarg_or_starred static asdl_seq * -_loop0_114_rule(Parser *p) +_loop0_115_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22119,7 +22406,7 @@ _loop0_114_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); + D(fprintf(stderr, "%*c> _loop0_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -22150,7 +22437,7 @@ _loop0_114_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_114[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_115[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22163,14 +22450,14 @@ _loop0_114_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_114_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_115_type, _seq); D(p->level--); return _seq; } -// _gather_113: kwarg_or_starred _loop0_114 +// _gather_114: kwarg_or_starred _loop0_115 static asdl_seq * -_gather_113_rule(Parser *p) +_gather_114_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22179,27 +22466,27 @@ _gather_113_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_starred _loop0_114 + { // kwarg_or_starred _loop0_115 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_113[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); + D(fprintf(stderr, "%*c> _gather_114[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_115")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_starred_rule(p)) // kwarg_or_starred && - (seq = _loop0_114_rule(p)) // _loop0_114 + (seq = _loop0_115_rule(p)) // _loop0_115 ) { - D(fprintf(stderr, "%*c+ _gather_113[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_114")); + D(fprintf(stderr, "%*c+ _gather_114[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_starred _loop0_115")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_113[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_starred _loop0_114")); + D(fprintf(stderr, "%*c%s _gather_114[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_starred _loop0_115")); } _res = NULL; done: @@ -22207,9 +22494,9 @@ _gather_113_rule(Parser *p) return _res; } -// _loop0_116: ',' kwarg_or_double_starred +// _loop0_117: ',' kwarg_or_double_starred static asdl_seq * -_loop0_116_rule(Parser *p) +_loop0_117_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22233,7 +22520,7 @@ _loop0_116_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); + D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' kwarg_or_double_starred")); Token * _literal; KeywordOrStarred* elem; while ( @@ -22264,7 +22551,7 @@ _loop0_116_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_116[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_117[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' kwarg_or_double_starred")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22277,14 +22564,14 @@ _loop0_116_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_116_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); D(p->level--); return _seq; } -// _gather_115: kwarg_or_double_starred _loop0_116 +// _gather_116: kwarg_or_double_starred _loop0_117 static asdl_seq * -_gather_115_rule(Parser *p) +_gather_116_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22293,27 +22580,27 @@ _gather_115_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // kwarg_or_double_starred _loop0_116 + { // kwarg_or_double_starred _loop0_117 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_115[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + D(fprintf(stderr, "%*c> _gather_116[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_117")); KeywordOrStarred* elem; asdl_seq * seq; if ( (elem = kwarg_or_double_starred_rule(p)) // kwarg_or_double_starred && - (seq = _loop0_116_rule(p)) // _loop0_116 + (seq = _loop0_117_rule(p)) // _loop0_117 ) { - D(fprintf(stderr, "%*c+ _gather_115[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + D(fprintf(stderr, "%*c+ _gather_116[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "kwarg_or_double_starred _loop0_117")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_115[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "kwarg_or_double_starred _loop0_116")); + D(fprintf(stderr, "%*c%s _gather_116[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "kwarg_or_double_starred _loop0_117")); } _res = NULL; done: @@ -22321,9 +22608,9 @@ _gather_115_rule(Parser *p) return _res; } -// _loop0_117: (',' star_target) +// _loop0_118: (',' star_target) static asdl_seq * -_loop0_117_rule(Parser *p) +_loop0_118_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22347,13 +22634,13 @@ _loop0_117_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_117[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_148_var; + D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); + void *_tmp_149_var; while ( - (_tmp_148_var = _tmp_148_rule(p)) // ',' star_target + (_tmp_149_var = _tmp_149_rule(p)) // ',' star_target ) { - _res = _tmp_148_var; + _res = _tmp_149_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22369,7 +22656,7 @@ _loop0_117_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_117[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_118[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(',' star_target)")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22382,14 +22669,14 @@ _loop0_117_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_117_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_118_type, _seq); D(p->level--); return _seq; } -// _loop0_119: ',' star_target +// _loop0_120: ',' star_target static asdl_seq * -_loop0_119_rule(Parser *p) +_loop0_120_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22413,7 +22700,7 @@ _loop0_119_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _loop0_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty elem; while ( @@ -22444,7 +22731,7 @@ _loop0_119_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_119[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_120[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22457,14 +22744,14 @@ _loop0_119_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_119_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_120_type, _seq); D(p->level--); return _seq; } -// _gather_118: star_target _loop0_119 +// _gather_119: star_target _loop0_120 static asdl_seq * -_gather_118_rule(Parser *p) +_gather_119_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22473,27 +22760,27 @@ _gather_118_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // star_target _loop0_119 + { // star_target _loop0_120 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); + D(fprintf(stderr, "%*c> _gather_119[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_target _loop0_120")); expr_ty elem; asdl_seq * seq; if ( (elem = star_target_rule(p)) // star_target && - (seq = _loop0_119_rule(p)) // _loop0_119 + (seq = _loop0_120_rule(p)) // _loop0_120 ) { - D(fprintf(stderr, "%*c+ _gather_118[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_119")); + D(fprintf(stderr, "%*c+ _gather_119[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_target _loop0_120")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_118[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_119")); + D(fprintf(stderr, "%*c%s _gather_119[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_target _loop0_120")); } _res = NULL; done: @@ -22501,9 +22788,9 @@ _gather_118_rule(Parser *p) return _res; } -// _tmp_120: !'*' star_target +// _tmp_121: !'*' star_target static void * -_tmp_120_rule(Parser *p) +_tmp_121_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22517,7 +22804,7 @@ _tmp_120_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_120[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c> _tmp_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); expr_ty star_target_var; if ( _PyPegen_lookahead_with_int(0, _PyPegen_expect_token, p, 16) // token='*' @@ -22525,12 +22812,12 @@ _tmp_120_rule(Parser *p) (star_target_var = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_120[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); + D(fprintf(stderr, "%*c+ _tmp_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "!'*' star_target")); _res = star_target_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_120[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_121[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "!'*' star_target")); } _res = NULL; @@ -22539,9 +22826,9 @@ _tmp_120_rule(Parser *p) return _res; } -// _loop0_122: ',' del_target +// _loop0_123: ',' del_target static asdl_seq * -_loop0_122_rule(Parser *p) +_loop0_123_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22565,7 +22852,7 @@ _loop0_122_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); + D(fprintf(stderr, "%*c> _loop0_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' del_target")); Token * _literal; expr_ty elem; while ( @@ -22596,7 +22883,7 @@ _loop0_122_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_122[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_123[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' del_target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22609,14 +22896,14 @@ _loop0_122_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_122_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_123_type, _seq); D(p->level--); return _seq; } -// _gather_121: del_target _loop0_122 +// _gather_122: del_target _loop0_123 static asdl_seq * -_gather_121_rule(Parser *p) +_gather_122_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22625,27 +22912,27 @@ _gather_121_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // del_target _loop0_122 + { // del_target _loop0_123 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_121[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); + D(fprintf(stderr, "%*c> _gather_122[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "del_target _loop0_123")); expr_ty elem; asdl_seq * seq; if ( (elem = del_target_rule(p)) // del_target && - (seq = _loop0_122_rule(p)) // _loop0_122 + (seq = _loop0_123_rule(p)) // _loop0_123 ) { - D(fprintf(stderr, "%*c+ _gather_121[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_122")); + D(fprintf(stderr, "%*c+ _gather_122[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "del_target _loop0_123")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_121[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "del_target _loop0_122")); + D(fprintf(stderr, "%*c%s _gather_122[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "del_target _loop0_123")); } _res = NULL; done: @@ -22653,9 +22940,9 @@ _gather_121_rule(Parser *p) return _res; } -// _loop0_124: ',' target +// _loop0_125: ',' target static asdl_seq * -_loop0_124_rule(Parser *p) +_loop0_125_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22679,7 +22966,7 @@ _loop0_124_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' target")); + D(fprintf(stderr, "%*c> _loop0_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' target")); Token * _literal; expr_ty elem; while ( @@ -22710,7 +22997,7 @@ _loop0_124_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_124[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_125[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' target")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22723,14 +23010,14 @@ _loop0_124_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_124_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_125_type, _seq); D(p->level--); return _seq; } -// _gather_123: target _loop0_124 +// _gather_124: target _loop0_125 static asdl_seq * -_gather_123_rule(Parser *p) +_gather_124_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22739,27 +23026,27 @@ _gather_123_rule(Parser *p) } asdl_seq * _res = NULL; int _mark = p->mark; - { // target _loop0_124 + { // target _loop0_125 if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _gather_123[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); + D(fprintf(stderr, "%*c> _gather_124[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "target _loop0_125")); expr_ty elem; asdl_seq * seq; if ( (elem = target_rule(p)) // target && - (seq = _loop0_124_rule(p)) // _loop0_124 + (seq = _loop0_125_rule(p)) // _loop0_125 ) { - D(fprintf(stderr, "%*c+ _gather_123[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "target _loop0_124")); + D(fprintf(stderr, "%*c+ _gather_124[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "target _loop0_125")); _res = _PyPegen_seq_insert_in_front(p, elem, seq); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _gather_123[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "target _loop0_124")); + D(fprintf(stderr, "%*c%s _gather_124[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "target _loop0_125")); } _res = NULL; done: @@ -22767,9 +23054,9 @@ _gather_123_rule(Parser *p) return _res; } -// _tmp_125: args | expression for_if_clauses +// _tmp_126: args | expression for_if_clauses static void * -_tmp_125_rule(Parser *p) +_tmp_126_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22783,18 +23070,18 @@ _tmp_125_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "args")); expr_ty args_var; if ( (args_var = args_rule(p)) // args ) { - D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); + D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "args")); _res = args_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "args")); } { // expression for_if_clauses @@ -22802,7 +23089,7 @@ _tmp_125_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_125[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c> _tmp_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); expr_ty expression_var; asdl_seq* for_if_clauses_var; if ( @@ -22811,12 +23098,12 @@ _tmp_125_rule(Parser *p) (for_if_clauses_var = for_if_clauses_rule(p)) // for_if_clauses ) { - D(fprintf(stderr, "%*c+ _tmp_125[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); + D(fprintf(stderr, "%*c+ _tmp_126[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression for_if_clauses")); _res = _PyPegen_dummy_name(p, expression_var, for_if_clauses_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_125[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_126[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression for_if_clauses")); } _res = NULL; @@ -22825,9 +23112,9 @@ _tmp_125_rule(Parser *p) return _res; } -// _loop0_126: star_named_expressions +// _loop0_127: star_named_expressions static asdl_seq * -_loop0_126_rule(Parser *p) +_loop0_127_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22851,7 +23138,7 @@ _loop0_126_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_126[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); + D(fprintf(stderr, "%*c> _loop0_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expressions")); asdl_seq* star_named_expressions_var; while ( (star_named_expressions_var = star_named_expressions_rule(p)) // star_named_expressions @@ -22873,7 +23160,7 @@ _loop0_126_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_126[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_127[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "star_named_expressions")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22886,14 +23173,14 @@ _loop0_126_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_126_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_127_type, _seq); D(p->level--); return _seq; } -// _tmp_127: '=' annotated_rhs +// _tmp_128: '=' annotated_rhs static void * -_tmp_127_rule(Parser *p) +_tmp_128_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22907,7 +23194,7 @@ _tmp_127_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_127[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); Token * _literal; expr_ty annotated_rhs_var; if ( @@ -22916,12 +23203,12 @@ _tmp_127_rule(Parser *p) (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs ) { - D(fprintf(stderr, "%*c+ _tmp_127[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); + D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_127[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); } _res = NULL; @@ -22930,9 +23217,9 @@ _tmp_127_rule(Parser *p) return _res; } -// _loop0_128: (star_targets '=') +// _loop0_129: (star_targets '=') static asdl_seq * -_loop0_128_rule(Parser *p) +_loop0_129_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -22956,13 +23243,13 @@ _loop0_128_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_149_var; + D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_150_var; while ( - (_tmp_149_var = _tmp_149_rule(p)) // star_targets '=' + (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' ) { - _res = _tmp_149_var; + _res = _tmp_150_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22978,7 +23265,7 @@ _loop0_128_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -22991,14 +23278,14 @@ _loop0_128_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_128_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); D(p->level--); return _seq; } -// _loop0_129: (star_targets '=') +// _loop0_130: (star_targets '=') static asdl_seq * -_loop0_129_rule(Parser *p) +_loop0_130_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23022,13 +23309,13 @@ _loop0_129_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_150_var; + D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_151_var; while ( - (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' + (_tmp_151_var = _tmp_151_rule(p)) // star_targets '=' ) { - _res = _tmp_150_var; + _res = _tmp_151_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -23044,7 +23331,7 @@ _loop0_129_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23057,14 +23344,14 @@ _loop0_129_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); D(p->level--); return _seq; } -// _tmp_130: yield_expr | star_expressions +// _tmp_131: yield_expr | star_expressions static void * -_tmp_130_rule(Parser *p) +_tmp_131_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23078,18 +23365,18 @@ _tmp_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -23097,18 +23384,18 @@ _tmp_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -23117,9 +23404,9 @@ _tmp_130_rule(Parser *p) return _res; } -// _tmp_131: '[' | '(' | '{' +// _tmp_132: '[' | '(' | '{' static void * -_tmp_131_rule(Parser *p) +_tmp_132_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23133,18 +23420,18 @@ _tmp_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' @@ -23152,18 +23439,18 @@ _tmp_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'('")); } { // '{' @@ -23171,18 +23458,18 @@ _tmp_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -23191,9 +23478,9 @@ _tmp_131_rule(Parser *p) return _res; } -// _loop0_132: param_no_default +// _loop0_133: param_no_default static asdl_seq * -_loop0_132_rule(Parser *p) +_loop0_133_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23217,7 +23504,7 @@ _loop0_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -23239,7 +23526,7 @@ _loop0_132_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_133[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23252,14 +23539,14 @@ _loop0_132_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_132_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_133_type, _seq); D(p->level--); return _seq; } -// _tmp_133: slash_with_default | param_with_default+ +// _tmp_134: slash_with_default | param_with_default+ static void * -_tmp_133_rule(Parser *p) +_tmp_134_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23273,18 +23560,18 @@ _tmp_133_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_with_default")); } { // param_with_default+ @@ -23292,18 +23579,18 @@ _tmp_133_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_151_var; + D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + asdl_seq * _loop1_152_var; if ( - (_loop1_151_var = _loop1_151_rule(p)) // param_with_default+ + (_loop1_152_var = _loop1_152_rule(p)) // param_with_default+ ) { - D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_151_var; + D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + _res = _loop1_152_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+")); } _res = NULL; @@ -23312,9 +23599,9 @@ _tmp_133_rule(Parser *p) return _res; } -// _loop0_134: lambda_param_no_default +// _loop0_135: lambda_param_no_default static asdl_seq * -_loop0_134_rule(Parser *p) +_loop0_135_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23338,7 +23625,7 @@ _loop0_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -23360,7 +23647,7 @@ _loop0_134_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23373,14 +23660,14 @@ _loop0_134_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_134_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_135_type, _seq); D(p->level--); return _seq; } -// _tmp_135: lambda_slash_with_default | lambda_param_with_default+ +// _tmp_136: lambda_slash_with_default | lambda_param_with_default+ static void * -_tmp_135_rule(Parser *p) +_tmp_136_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23394,18 +23681,18 @@ _tmp_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } { // lambda_param_with_default+ @@ -23413,18 +23700,18 @@ _tmp_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_152_var; + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + asdl_seq * _loop1_153_var; if ( - (_loop1_152_var = _loop1_152_rule(p)) // lambda_param_with_default+ + (_loop1_153_var = _loop1_153_rule(p)) // lambda_param_with_default+ ) { - D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_152_var; + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + _res = _loop1_153_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default+")); } _res = NULL; @@ -23433,9 +23720,9 @@ _tmp_135_rule(Parser *p) return _res; } -// _tmp_136: ')' | ',' (')' | '**') +// _tmp_137: ')' | ',' (')' | '**') static void * -_tmp_136_rule(Parser *p) +_tmp_137_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23449,18 +23736,18 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -23468,21 +23755,21 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_153_var; + void *_tmp_154_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_153_var = _tmp_153_rule(p)) // ')' | '**' + (_tmp_154_var = _tmp_154_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_153_var); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -23491,9 +23778,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: ':' | ',' (':' | '**') +// _tmp_138: ':' | ',' (':' | '**') static void * -_tmp_137_rule(Parser *p) +_tmp_138_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23507,18 +23794,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -23526,21 +23813,21 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_154_var; + void *_tmp_155_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_154_var = _tmp_154_rule(p)) // ':' | '**' + (_tmp_155_var = _tmp_155_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_155_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -23549,9 +23836,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: star_targets '=' +// _tmp_139: star_targets '=' static void * -_tmp_138_rule(Parser *p) +_tmp_139_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23565,7 +23852,7 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -23574,7 +23861,7 @@ _tmp_138_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23584,7 +23871,7 @@ _tmp_138_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23593,9 +23880,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: '.' | '...' +// _tmp_140: '.' | '...' static void * -_tmp_139_rule(Parser *p) +_tmp_140_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23609,18 +23896,18 @@ _tmp_139_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' 
@@ -23628,18 +23915,18 @@ _tmp_139_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23648,9 +23935,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: '.' | '...' +// _tmp_141: '.' | '...' static void * -_tmp_140_rule(Parser *p) +_tmp_141_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23664,18 +23951,18 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -23683,18 +23970,18 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23703,9 +23990,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: '@' named_expression NEWLINE +// _tmp_142: '@' named_expression NEWLINE static void * -_tmp_141_rule(Parser *p) +_tmp_142_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23719,7 +24006,7 @@ _tmp_141_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -23731,7 +24018,7 @@ _tmp_141_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23741,7 +24028,7 @@ _tmp_141_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -23750,9 +24037,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: ',' star_expression +// _tmp_143: ',' star_expression static void * -_tmp_142_rule(Parser *p) +_tmp_143_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23766,7 +24053,7 @@ _tmp_142_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -23775,7 +24062,7 @@ _tmp_142_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23785,7 +24072,7 @@ _tmp_142_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -23794,9 +24081,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: ',' expression +// _tmp_144: ',' expression static void * -_tmp_143_rule(Parser *p) +_tmp_144_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23810,7 +24097,7 @@ _tmp_143_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -23819,7 +24106,7 @@ _tmp_143_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23829,7 +24116,7 @@ _tmp_143_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -23838,9 +24125,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: 'or' conjunction +// _tmp_145: 'or' conjunction static void * -_tmp_144_rule(Parser *p) +_tmp_145_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23854,7 +24141,7 @@ _tmp_144_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -23863,7 +24150,7 @@ _tmp_144_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23873,7 +24160,7 @@ _tmp_144_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -23882,9 +24169,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: 'and' inversion +// _tmp_146: 'and' inversion static void * -_tmp_145_rule(Parser *p) +_tmp_146_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23898,7 +24185,7 @@ _tmp_145_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -23907,7 +24194,7 @@ _tmp_145_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23917,7 +24204,7 @@ _tmp_145_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -23926,9 +24213,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: 'if' disjunction +// _tmp_147: 'if' disjunction static void * -_tmp_146_rule(Parser *p) +_tmp_147_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23942,7 +24229,7 @@ _tmp_146_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23951,7 +24238,7 @@ _tmp_146_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23961,7 +24248,7 @@ _tmp_146_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -23970,9 +24257,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: 'if' disjunction +// _tmp_148: 'if' disjunction static void * -_tmp_147_rule(Parser *p) +_tmp_148_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23986,7 +24273,7 @@ _tmp_147_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -23995,7 +24282,7 @@ _tmp_147_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24005,7 +24292,7 @@ _tmp_147_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -24014,9 +24301,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: ',' star_target +// _tmp_149: ',' star_target static void * -_tmp_148_rule(Parser *p) +_tmp_149_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24030,7 +24317,7 @@ _tmp_148_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -24039,7 +24326,7 @@ _tmp_148_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24049,7 +24336,7 @@ _tmp_148_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -24058,9 +24345,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _tmp_149: star_targets '=' +// _tmp_150: star_targets '=' static void * -_tmp_149_rule(Parser *p) +_tmp_150_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24074,7 +24361,7 @@ _tmp_149_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -24083,12 +24370,12 @@ _tmp_149_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -24097,9 +24384,9 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: star_targets '=' +// _tmp_151: star_targets '=' static void * -_tmp_150_rule(Parser *p) +_tmp_151_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24113,7 +24400,7 @@ _tmp_150_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -24122,12 +24409,12 @@ _tmp_150_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -24136,9 +24423,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _loop1_151: param_with_default +// _loop1_152: param_with_default static asdl_seq * -_loop1_151_rule(Parser *p) +_loop1_152_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24162,7 +24449,7 @@ _loop1_151_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -24184,7 +24471,7 @@ _loop1_151_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -24202,14 +24489,14 @@ _loop1_151_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_151_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); D(p->level--); return _seq; } -// _loop1_152: lambda_param_with_default +// _loop1_153: lambda_param_with_default static asdl_seq * -_loop1_152_rule(Parser *p) +_loop1_153_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24233,7 +24520,7 @@ _loop1_152_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -24255,7 +24542,7 @@ _loop1_152_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -24273,14 +24560,14 @@ _loop1_152_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_153_type, _seq); D(p->level--); return _seq; } -// _tmp_153: ')' | '**' +// _tmp_154: ')' | '**' static void * -_tmp_153_rule(Parser *p) +_tmp_154_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24294,18 +24581,18 @@ _tmp_153_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -24313,18 +24600,18 @@ _tmp_153_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -24333,9 +24620,9 @@ _tmp_153_rule(Parser *p) return _res; } -// _tmp_154: ':' | '**' +// _tmp_155: ':' | '**' static void * -_tmp_154_rule(Parser *p) +_tmp_155_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24349,18 +24636,18 @@ _tmp_154_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -24368,18 +24655,18 @@ _tmp_154_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; diff --git a/Parser/pegen.c b/Parser/pegen.c index e153e924e9311..b374740308a36 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -380,7 +380,6 @@ _PyPegen_raise_error(Parser *p, PyObject *errtype, const char *errmsg, ...) return NULL; } - void * _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, Py_ssize_t lineno, Py_ssize_t col_offset, @@ -2086,7 +2085,7 @@ _PyPegen_make_module(Parser *p, asdl_seq *a) { // Error reporting helpers expr_ty -_PyPegen_get_invalid_target(expr_ty e) +_PyPegen_get_invalid_target(expr_ty e, TARGETS_TYPE targets_type) { if (e == NULL) { return NULL; @@ -2096,7 +2095,7 @@ _PyPegen_get_invalid_target(expr_ty e) Py_ssize_t len = asdl_seq_LEN(CONTAINER->v.TYPE.elts);\ for (Py_ssize_t i = 0; i < len; i++) {\ expr_ty other = asdl_seq_GET(CONTAINER->v.TYPE.elts, i);\ - expr_ty child = _PyPegen_get_invalid_target(other);\ + expr_ty child = _PyPegen_get_invalid_target(other, targets_type);\ if (child != NULL) {\ return child;\ }\ @@ -2110,16 +2109,29 @@ _PyPegen_get_invalid_target(expr_ty e) // we don't need to visit it recursively. 
switch (e->kind) { - case List_kind: { + case List_kind: VISIT_CONTAINER(e, List); return NULL; - } - case Tuple_kind: { + case Tuple_kind: VISIT_CONTAINER(e, Tuple); return NULL; - } case Starred_kind: - return _PyPegen_get_invalid_target(e->v.Starred.value); + if (targets_type == DEL_TARGETS) { + return e; + } + return _PyPegen_get_invalid_target(e->v.Starred.value, targets_type); + case Compare_kind: + // This is needed, because the `a in b` in `for a in b` gets parsed + // as a comparison, and so we need to search the left side of the comparison + // for invalid targets. + if (targets_type == FOR_TARGETS) { + cmpop_ty cmpop = (cmpop_ty) asdl_seq_GET(e->v.Compare.ops, 0); + if (cmpop == In) { + return _PyPegen_get_invalid_target(e->v.Compare.left, targets_type); + } + return NULL; + } + return e; case Name_kind: case Subscript_kind: case Attribute_kind: diff --git a/Parser/pegen.h b/Parser/pegen.h index c4ff8c9d51252..43168074c3500 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -263,11 +263,21 @@ int _PyPegen_check_barry_as_flufl(Parser *); mod_ty _PyPegen_make_module(Parser *, asdl_seq *); // Error reporting helpers -expr_ty _PyPegen_get_invalid_target(expr_ty e); +typedef enum { + STAR_TARGETS, + DEL_TARGETS, + FOR_TARGETS +} TARGETS_TYPE; +expr_ty _PyPegen_get_invalid_target(expr_ty e, TARGETS_TYPE targets_type); +#define GET_INVALID_TARGET(e) (expr_ty)CHECK(_PyPegen_get_invalid_target(e, STAR_TARGETS)) +#define GET_INVALID_DEL_TARGET(e) (expr_ty)CHECK_NULL_ALLOWED(_PyPegen_get_invalid_target(e, DEL_TARGETS)) +#define GET_INVALID_FOR_TARGET(e) (expr_ty)CHECK_NULL_ALLOWED(_PyPegen_get_invalid_target(e, FOR_TARGETS)) + void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); void *_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args); +// Generated function in parse.c - function definition in python.gram void *_PyPegen_parse(Parser *); #endif From webhook-mailer at python.org Fri Jun 19 05:45:40 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 19 Jun 2020 09:45:40 -0000 Subject: [Python-checkins] bpo-40943: PY_SSIZE_T_CLEAN required for '#' formats (GH-20784) Message-ID: https://github.com/python/cpython/commit/37bb2895561d3e63a631f10875567b4e33b30c07 commit: 37bb2895561d3e63a631f10875567b4e33b30c07 branch: master author: Victor Stinner committer: GitHub date: 2020-06-19T11:45:31+02:00 summary: bpo-40943: PY_SSIZE_T_CLEAN required for '#' formats (GH-20784) The PY_SSIZE_T_CLEAN macro must now be defined to use PyArg_ParseTuple() and Py_BuildValue() "#" formats: "es#", "et#", "s#", "u#", "y#", "z#", "U#" and "Z#". See the PEP 353. Update _testcapi.test_buildvalue_issue38913(). files: A Misc/NEWS.d/next/C API/2020-06-10-18-37-26.bpo-40943.i4q7rK.rst M Doc/c-api/arg.rst M Doc/whatsnew/3.10.rst M Modules/_testcapimodule.c M Python/getargs.c M Python/modsupport.c diff --git a/Doc/c-api/arg.rst b/Doc/c-api/arg.rst index b7baad589a72c..26e872c5a348e 100644 --- a/Doc/c-api/arg.rst +++ b/Doc/c-api/arg.rst @@ -55,13 +55,11 @@ which disallows mutable objects such as :class:`bytearray`. .. note:: - For all ``#`` variants of formats (``s#``, ``y#``, etc.), the type of - the length argument (int or :c:type:`Py_ssize_t`) is controlled by - defining the macro :c:macro:`PY_SSIZE_T_CLEAN` before including - :file:`Python.h`. If the macro was defined, length is a - :c:type:`Py_ssize_t` rather than an :c:type:`int`. This behavior will change - in a future Python version to only support :c:type:`Py_ssize_t` and - drop :c:type:`int` support. 
It is best to always define :c:macro:`PY_SSIZE_T_CLEAN`. + For all ``#`` variants of formats (``s#``, ``y#``, etc.), the macro + :c:macro:`PY_SSIZE_T_CLEAN` must be defined before including + :file:`Python.h`. On Python 3.9 and older, the type of the length argument + is :c:type:`Py_ssize_t` if the :c:macro:`PY_SSIZE_T_CLEAN` macro is defined, + or int otherwise. ``s`` (:class:`str`) [const char \*] @@ -90,7 +88,7 @@ which disallows mutable objects such as :class:`bytearray`. In this case the resulting C string may contain embedded NUL bytes. Unicode objects are converted to C strings using ``'utf-8'`` encoding. -``s#`` (:class:`str`, read-only :term:`bytes-like object`) [const char \*, int or :c:type:`Py_ssize_t`] +``s#`` (:class:`str`, read-only :term:`bytes-like object`) [const char \*, :c:type:`Py_ssize_t`] Like ``s*``, except that it doesn't accept mutable objects. The result is stored into two C variables, the first one a pointer to a C string, the second one its length. @@ -105,7 +103,7 @@ which disallows mutable objects such as :class:`bytearray`. Like ``s*``, but the Python object may also be ``None``, in which case the ``buf`` member of the :c:type:`Py_buffer` structure is set to ``NULL``. -``z#`` (:class:`str`, read-only :term:`bytes-like object` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`] +``z#`` (:class:`str`, read-only :term:`bytes-like object` or ``None``) [const char \*, :c:type:`Py_ssize_t`] Like ``s#``, but the Python object may also be ``None``, in which case the C pointer is set to ``NULL``. @@ -124,7 +122,7 @@ which disallows mutable objects such as :class:`bytearray`. bytes-like objects. **This is the recommended way to accept binary data.** -``y#`` (read-only :term:`bytes-like object`) [const char \*, int or :c:type:`Py_ssize_t`] +``y#`` (read-only :term:`bytes-like object`) [const char \*, :c:type:`Py_ssize_t`] This variant on ``s#`` doesn't accept Unicode objects, only bytes-like objects. @@ -155,7 +153,7 @@ which disallows mutable objects such as :class:`bytearray`. Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using :c:func:`PyUnicode_AsWideCharString`. -``u#`` (:class:`str`) [const Py_UNICODE \*, int or :c:type:`Py_ssize_t`] +``u#`` (:class:`str`) [const Py_UNICODE \*, :c:type:`Py_ssize_t`] This variant on ``u`` stores into two C variables, the first one a pointer to a Unicode data buffer, the second one its length. This variant allows null code points. @@ -172,7 +170,7 @@ which disallows mutable objects such as :class:`bytearray`. Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using :c:func:`PyUnicode_AsWideCharString`. -``Z#`` (:class:`str` or ``None``) [const Py_UNICODE \*, int or :c:type:`Py_ssize_t`] +``Z#`` (:class:`str` or ``None``) [const Py_UNICODE \*, :c:type:`Py_ssize_t`] Like ``u#``, but the Python object may also be ``None``, in which case the :c:type:`Py_UNICODE` pointer is set to ``NULL``. @@ -213,7 +211,7 @@ which disallows mutable objects such as :class:`bytearray`. recoding them. Instead, the implementation assumes that the byte string object uses the encoding passed in as parameter. -``es#`` (:class:`str`) [const char \*encoding, char \*\*buffer, int or :c:type:`Py_ssize_t` \*buffer_length] +``es#`` (:class:`str`) [const char \*encoding, char \*\*buffer, :c:type:`Py_ssize_t` \*buffer_length] This variant on ``s#`` is used for encoding Unicode into a character buffer. Unlike the ``es`` format, this variant allows input data which contains NUL characters. 
@@ -244,7 +242,7 @@ which disallows mutable objects such as :class:`bytearray`. In both cases, *\*buffer_length* is set to the length of the encoded data without the trailing NUL byte. -``et#`` (:class:`str`, :class:`bytes` or :class:`bytearray`) [const char \*encoding, char \*\*buffer, int or :c:type:`Py_ssize_t` \*buffer_length] +``et#`` (:class:`str`, :class:`bytes` or :class:`bytearray`) [const char \*encoding, char \*\*buffer, :c:type:`Py_ssize_t` \*buffer_length] Same as ``es#`` except that byte string objects are passed through without recoding them. Instead, the implementation assumes that the byte string object uses the encoding passed in as parameter. @@ -549,7 +547,7 @@ Building values Convert a null-terminated C string to a Python :class:`str` object using ``'utf-8'`` encoding. If the C string pointer is ``NULL``, ``None`` is used. - ``s#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`] + ``s#`` (:class:`str` or ``None``) [const char \*, :c:type:`Py_ssize_t`] Convert a C string and its length to a Python :class:`str` object using ``'utf-8'`` encoding. If the C string pointer is ``NULL``, the length is ignored and ``None`` is returned. @@ -558,14 +556,14 @@ Building values This converts a C string to a Python :class:`bytes` object. If the C string pointer is ``NULL``, ``None`` is returned. - ``y#`` (:class:`bytes`) [const char \*, int or :c:type:`Py_ssize_t`] + ``y#`` (:class:`bytes`) [const char \*, :c:type:`Py_ssize_t`] This converts a C string and its lengths to a Python object. If the C string pointer is ``NULL``, ``None`` is returned. ``z`` (:class:`str` or ``None``) [const char \*] Same as ``s``. - ``z#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`] + ``z#`` (:class:`str` or ``None``) [const char \*, :c:type:`Py_ssize_t`] Same as ``s#``. ``u`` (:class:`str`) [const wchar_t \*] @@ -573,7 +571,7 @@ Building values data to a Python Unicode object. If the Unicode buffer pointer is ``NULL``, ``None`` is returned. - ``u#`` (:class:`str`) [const wchar_t \*, int or :c:type:`Py_ssize_t`] + ``u#`` (:class:`str`) [const wchar_t \*, :c:type:`Py_ssize_t`] Convert a Unicode (UTF-16 or UCS-4) data buffer and its length to a Python Unicode object. If the Unicode buffer pointer is ``NULL``, the length is ignored and ``None`` is returned. @@ -581,7 +579,7 @@ Building values ``U`` (:class:`str` or ``None``) [const char \*] Same as ``s``. - ``U#`` (:class:`str` or ``None``) [const char \*, int or :c:type:`Py_ssize_t`] + ``U#`` (:class:`str` or ``None``) [const char \*, :c:type:`Py_ssize_t`] Same as ``s#``. ``i`` (:class:`int`) [int] diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 566827bf90ff3..9c1dca1152a64 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -155,6 +155,13 @@ New Features Porting to Python 3.10 ---------------------- +* The ``PY_SSIZE_T_CLEAN`` macro must now be defined to use + :c:func:`PyArg_ParseTuple` and :c:func:`Py_BuildValue` formats which use + ``#``: ``es#``, ``et#``, ``s#``, ``u#``, ``y#``, ``z#``, ``U#`` and ``Z#``. + See :ref:`Parsing arguments and building values + ` and the :pep:`353`. + (Contributed by Victor Stinner in :issue:`40943`.) + * Since :c:func:`Py_TYPE()` is changed to the inline static function, ``Py_TYPE(obj) = new_type`` must be replaced with ``Py_SET_TYPE(obj, new_type)``: see :c:func:`Py_SET_TYPE()` (available since Python 3.9). 
For backward diff --git a/Misc/NEWS.d/next/C API/2020-06-10-18-37-26.bpo-40943.i4q7rK.rst b/Misc/NEWS.d/next/C API/2020-06-10-18-37-26.bpo-40943.i4q7rK.rst new file mode 100644 index 0000000000000..360ddae34cb96 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-10-18-37-26.bpo-40943.i4q7rK.rst @@ -0,0 +1,5 @@ +The ``PY_SSIZE_T_CLEAN`` macro must now be defined to use +:c:func:`PyArg_ParseTuple` and :c:func:`Py_BuildValue` formats which use ``#``: +``es#``, ``et#``, ``s#``, ``u#``, ``y#``, ``z#``, ``U#`` and ``Z#``. +See :ref:`Parsing arguments and building values ` and the +:pep:`353`. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index 5302641a9a37e..808483ebd7bf4 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -6868,29 +6868,36 @@ test_buildvalue_issue38913(PyObject *self, PyObject *Py_UNUSED(ignored)) PyObject *res; const char str[] = "string"; const Py_UNICODE unicode[] = L"unicode"; - PyErr_SetNone(PyExc_ZeroDivisionError); + assert(!PyErr_Occurred()); res = Py_BuildValue("(s#O)", str, 1, Py_None); assert(res == NULL); - if (!PyErr_ExceptionMatches(PyExc_ZeroDivisionError)) { + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { return NULL; } + PyErr_Clear(); + res = Py_BuildValue("(z#O)", str, 1, Py_None); assert(res == NULL); - if (!PyErr_ExceptionMatches(PyExc_ZeroDivisionError)) { + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { return NULL; } + PyErr_Clear(); + res = Py_BuildValue("(y#O)", str, 1, Py_None); assert(res == NULL); - if (!PyErr_ExceptionMatches(PyExc_ZeroDivisionError)) { + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { return NULL; } + PyErr_Clear(); + res = Py_BuildValue("(u#O)", unicode, 1, Py_None); assert(res == NULL); - if (!PyErr_ExceptionMatches(PyExc_ZeroDivisionError)) { + if (!PyErr_ExceptionMatches(PyExc_SystemError)) { return NULL; } - PyErr_Clear(); + + Py_RETURN_NONE; } diff --git a/Python/getargs.c b/Python/getargs.c index cf0cc0783687a..aaf687a46b7f6 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -656,27 +656,12 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, char *msgbuf, size_t bufsize, freelist_t *freelist) { /* For # codes */ -#define FETCH_SIZE int *q=NULL;Py_ssize_t *q2=NULL;\ - if (flags & FLAG_SIZE_T) q2=va_arg(*p_va, Py_ssize_t*); \ - else { \ - if (PyErr_WarnEx(PyExc_DeprecationWarning, \ - "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { \ - return NULL; \ - } \ - q=va_arg(*p_va, int*); \ - } -#define STORE_SIZE(s) \ - if (flags & FLAG_SIZE_T) \ - *q2=s; \ - else { \ - if (INT_MAX < s) { \ - PyErr_SetString(PyExc_OverflowError, \ - "size does not fit in an int"); \ - return converterr("", arg, msgbuf, bufsize); \ - } \ - *q = (int)s; \ - } -#define BUFFER_LEN ((flags & FLAG_SIZE_T) ? 
*q2:*q) +#define REQUIRE_PY_SSIZE_T_CLEAN \ + if (!(flags & FLAG_SIZE_T)) { \ + PyErr_SetString(PyExc_SystemError, \ + "PY_SSIZE_T_CLEAN macro must be defined for '#' formats"); \ + return NULL; \ + } #define RETURN_ERR_OCCURRED return msgbuf const char *format = *p_format; @@ -931,8 +916,9 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, if (count < 0) return converterr(buf, arg, msgbuf, bufsize); if (*format == '#') { - FETCH_SIZE; - STORE_SIZE(count); + REQUIRE_PY_SSIZE_T_CLEAN; + Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*); + *psize = count; format++; } else { if (strlen(*p) != (size_t)count) { @@ -974,11 +960,12 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, } else if (*format == '#') { /* a string or read-only bytes-like object */ /* "s#" or "z#" */ const void **p = (const void **)va_arg(*p_va, const char **); - FETCH_SIZE; + REQUIRE_PY_SSIZE_T_CLEAN; + Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*); if (c == 'z' && arg == Py_None) { *p = NULL; - STORE_SIZE(0); + *psize = 0; } else if (PyUnicode_Check(arg)) { Py_ssize_t len; @@ -987,7 +974,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, return converterr(CONV_UNICODE, arg, msgbuf, bufsize); *p = sarg; - STORE_SIZE(len); + *psize = len; } else { /* read-only bytes-like object */ /* XXX Really? */ @@ -995,7 +982,7 @@ convertsimple(PyObject *arg, const char **p_format, va_list *p_va, int flags, Py_ssize_t count = convertbuffer(arg, p, &buf); if (count < 0) return converterr(buf, arg, msgbuf, bufsize); - STORE_SIZE(count); + *psize = count; } format++; } else { @@ -1034,18 +1021,19 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS if (*format == '#') { /* "u#" or "Z#" */ - FETCH_SIZE; + REQUIRE_PY_SSIZE_T_CLEAN; + Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*); if (c == 'Z' && arg == Py_None) { *p = NULL; - STORE_SIZE(0); + *psize = 0; } else if (PyUnicode_Check(arg)) { Py_ssize_t len; *p = PyUnicode_AsUnicodeAndSize(arg, &len); if (*p == NULL) RETURN_ERR_OCCURRED; - STORE_SIZE(len); + *psize = len; } else return converterr(c == 'Z' ? 
"str or None" : "str", @@ -1160,22 +1148,11 @@ _Py_COMP_DIAG_POP trailing 0-byte */ - int *q = NULL; Py_ssize_t *q2 = NULL; - if (flags & FLAG_SIZE_T) { - q2 = va_arg(*p_va, Py_ssize_t*); - } - else { - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) - { - Py_DECREF(s); - return NULL; - } - q = va_arg(*p_va, int*); - } + REQUIRE_PY_SSIZE_T_CLEAN; + Py_ssize_t *psize = va_arg(*p_va, Py_ssize_t*); format++; - if (q == NULL && q2 == NULL) { + if (psize == NULL) { Py_DECREF(s); return converterr( "(buffer_len is NULL)", @@ -1195,30 +1172,20 @@ _Py_COMP_DIAG_POP arg, msgbuf, bufsize); } } else { - if (size + 1 > BUFFER_LEN) { + if (size + 1 > *psize) { Py_DECREF(s); PyErr_Format(PyExc_ValueError, "encoded string too long " "(%zd, maximum length %zd)", - (Py_ssize_t)size, (Py_ssize_t)(BUFFER_LEN-1)); + (Py_ssize_t)size, (Py_ssize_t)(*psize - 1)); RETURN_ERR_OCCURRED; } } memcpy(*buffer, ptr, size+1); - if (flags & FLAG_SIZE_T) { - *q2 = size; - } - else { - if (INT_MAX < size) { - Py_DECREF(s); - PyErr_SetString(PyExc_OverflowError, - "size does not fit in an int"); - return converterr("", arg, msgbuf, bufsize); - } - *q = (int)size; - } - } else { + *psize = size; + } + else { /* Using a 0-terminated buffer: - the encoded string has to be 0-terminated @@ -1356,9 +1323,7 @@ _Py_COMP_DIAG_POP *p_format = format; return NULL; -#undef FETCH_SIZE -#undef STORE_SIZE -#undef BUFFER_LEN +#undef REQUIRE_PY_SSIZE_T_CLEAN #undef RETURN_ERR_OCCURRED } diff --git a/Python/modsupport.c b/Python/modsupport.c index 845bdcb2b6f1b..2637039d4a151 100644 --- a/Python/modsupport.c +++ b/Python/modsupport.c @@ -283,6 +283,13 @@ do_mktuple(const char **p_format, va_list *p_va, char endchar, Py_ssize_t n, int static PyObject * do_mkvalue(const char **p_format, va_list *p_va, int flags) { +#define ERROR_NEED_PY_SSIZE_T_CLEAN \ + { \ + PyErr_SetString(PyExc_SystemError, \ + "PY_SSIZE_T_CLEAN macro must be defined for '#' formats"); \ + return NULL; \ + } + for (;;) { switch (*(*p_format)++) { case '(': @@ -341,14 +348,12 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) Py_ssize_t n; if (**p_format == '#') { ++*p_format; - if (flags & FLAG_SIZE_T) + if (flags & FLAG_SIZE_T) { n = va_arg(*p_va, Py_ssize_t); + } else { n = va_arg(*p_va, int); - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { - return NULL; - } + ERROR_NEED_PY_SSIZE_T_CLEAN; } } else @@ -394,14 +399,12 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) Py_ssize_t n; if (**p_format == '#') { ++*p_format; - if (flags & FLAG_SIZE_T) + if (flags & FLAG_SIZE_T) { n = va_arg(*p_va, Py_ssize_t); + } else { n = va_arg(*p_va, int); - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { - return NULL; - } + ERROR_NEED_PY_SSIZE_T_CLEAN; } } else @@ -432,14 +435,12 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) Py_ssize_t n; if (**p_format == '#') { ++*p_format; - if (flags & FLAG_SIZE_T) + if (flags & FLAG_SIZE_T) { n = va_arg(*p_va, Py_ssize_t); + } else { n = va_arg(*p_va, int); - if (PyErr_WarnEx(PyExc_DeprecationWarning, - "PY_SSIZE_T_CLEAN will be required for '#' formats", 1)) { - return NULL; - } + ERROR_NEED_PY_SSIZE_T_CLEAN; } } else @@ -507,6 +508,8 @@ do_mkvalue(const char **p_format, va_list *p_va, int flags) } } + +#undef ERROR_NEED_PY_SSIZE_T_CLEAN } From webhook-mailer at python.org Fri Jun 19 06:17:29 2020 From: webhook-mailer at python.org (Guido van 
Rossum) Date: Fri, 19 Jun 2020 10:17:29 -0000 Subject: [Python-checkins] bpo-40636: PEP 618: add strict parameter to zip() (GH-20921) Message-ID: https://github.com/python/cpython/commit/310f6aa7db8dd48952ed718111ce0f016b1c8ef9 commit: 310f6aa7db8dd48952ed718111ce0f016b1c8ef9 branch: master author: Guido van Rossum committer: GitHub date: 2020-06-19T12:16:57+02:00 summary: bpo-40636: PEP 618: add strict parameter to zip() (GH-20921) zip() now supports PEP 618's strict parameter, which raises a ValueError if the arguments are exhausted at different lengths. Patch by Brandt Bucher. Co-authored-by: Brandt Bucher Co-authored-by: Ram Rachum files: A Misc/NEWS.d/next/Core and Builtins/2020-06-17-10-27-17.bpo-40636.MYaCIe.rst M Lib/test/test_builtin.py M Python/bltinmodule.c diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py index 290ba2cad8e5e..40df7b606ae5e 100644 --- a/Lib/test/test_builtin.py +++ b/Lib/test/test_builtin.py @@ -1521,6 +1521,14 @@ def test_vars(self): self.assertRaises(TypeError, vars, 42) self.assertEqual(vars(self.C_get_vars()), {'a':2}) + def iter_error(self, iterable, error): + """Collect `iterable` into a list, catching an expected `error`.""" + items = [] + with self.assertRaises(error): + for item in iterable: + items.append(item) + return items + def test_zip(self): a = (1, 2, 3) b = (4, 5, 6) @@ -1573,6 +1581,66 @@ def test_zip_pickle(self): z1 = zip(a, b) self.check_iter_pickle(z1, t, proto) + def test_zip_pickle_strict(self): + a = (1, 2, 3) + b = (4, 5, 6) + t = [(1, 4), (2, 5), (3, 6)] + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z1 = zip(a, b, strict=True) + self.check_iter_pickle(z1, t, proto) + + def test_zip_pickle_strict_fail(self): + a = (1, 2, 3) + b = (4, 5, 6, 7) + t = [(1, 4), (2, 5), (3, 6)] + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + z1 = zip(a, b, strict=True) + z2 = pickle.loads(pickle.dumps(z1, proto)) + self.assertEqual(self.iter_error(z1, ValueError), t) + self.assertEqual(self.iter_error(z2, ValueError), t) + + def test_zip_pickle_stability(self): + # Pickles of zip((1, 2, 3), (4, 5, 6)) dumped from 3.9: + pickles = [ + b'citertools\nizip\np0\n(c__builtin__\niter\np1\n((I1\nI2\nI3\ntp2\ntp3\nRp4\nI0\nbg1\n((I4\nI5\nI6\ntp5\ntp6\nRp7\nI0\nbtp8\nRp9\n.', + b'citertools\nizip\nq\x00(c__builtin__\niter\nq\x01((K\x01K\x02K\x03tq\x02tq\x03Rq\x04K\x00bh\x01((K\x04K\x05K\x06tq\x05tq\x06Rq\x07K\x00btq\x08Rq\t.', + b'\x80\x02citertools\nizip\nq\x00c__builtin__\niter\nq\x01K\x01K\x02K\x03\x87q\x02\x85q\x03Rq\x04K\x00bh\x01K\x04K\x05K\x06\x87q\x05\x85q\x06Rq\x07K\x00b\x86q\x08Rq\t.', + b'\x80\x03cbuiltins\nzip\nq\x00cbuiltins\niter\nq\x01K\x01K\x02K\x03\x87q\x02\x85q\x03Rq\x04K\x00bh\x01K\x04K\x05K\x06\x87q\x05\x85q\x06Rq\x07K\x00b\x86q\x08Rq\t.', + b'\x80\x04\x95L\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x94\x8c\x03zip\x94\x93\x94\x8c\x08builtins\x94\x8c\x04iter\x94\x93\x94K\x01K\x02K\x03\x87\x94\x85\x94R\x94K\x00bh\x05K\x04K\x05K\x06\x87\x94\x85\x94R\x94K\x00b\x86\x94R\x94.', + b'\x80\x05\x95L\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x94\x8c\x03zip\x94\x93\x94\x8c\x08builtins\x94\x8c\x04iter\x94\x93\x94K\x01K\x02K\x03\x87\x94\x85\x94R\x94K\x00bh\x05K\x04K\x05K\x06\x87\x94\x85\x94R\x94K\x00b\x86\x94R\x94.', + ] + for protocol, dump in enumerate(pickles): + z1 = zip((1, 2, 3), (4, 5, 6)) + z2 = zip((1, 2, 3), (4, 5, 6), strict=False) + z3 = pickle.loads(dump) + l3 = list(z3) + self.assertEqual(type(z3), zip) + self.assertEqual(pickle.dumps(z1, protocol), dump) + self.assertEqual(pickle.dumps(z2, protocol), dump) + 
self.assertEqual(list(z1), l3) + self.assertEqual(list(z2), l3) + + def test_zip_pickle_strict_stability(self): + # Pickles of zip((1, 2, 3), (4, 5), strict=True) dumped from 3.10: + pickles = [ + b'citertools\nizip\np0\n(c__builtin__\niter\np1\n((I1\nI2\nI3\ntp2\ntp3\nRp4\nI0\nbg1\n((I4\nI5\ntp5\ntp6\nRp7\nI0\nbtp8\nRp9\nI01\nb.', + b'citertools\nizip\nq\x00(c__builtin__\niter\nq\x01((K\x01K\x02K\x03tq\x02tq\x03Rq\x04K\x00bh\x01((K\x04K\x05tq\x05tq\x06Rq\x07K\x00btq\x08Rq\tI01\nb.', + b'\x80\x02citertools\nizip\nq\x00c__builtin__\niter\nq\x01K\x01K\x02K\x03\x87q\x02\x85q\x03Rq\x04K\x00bh\x01K\x04K\x05\x86q\x05\x85q\x06Rq\x07K\x00b\x86q\x08Rq\t\x88b.', + b'\x80\x03cbuiltins\nzip\nq\x00cbuiltins\niter\nq\x01K\x01K\x02K\x03\x87q\x02\x85q\x03Rq\x04K\x00bh\x01K\x04K\x05\x86q\x05\x85q\x06Rq\x07K\x00b\x86q\x08Rq\t\x88b.', + b'\x80\x04\x95L\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x94\x8c\x03zip\x94\x93\x94\x8c\x08builtins\x94\x8c\x04iter\x94\x93\x94K\x01K\x02K\x03\x87\x94\x85\x94R\x94K\x00bh\x05K\x04K\x05\x86\x94\x85\x94R\x94K\x00b\x86\x94R\x94\x88b.', + b'\x80\x05\x95L\x00\x00\x00\x00\x00\x00\x00\x8c\x08builtins\x94\x8c\x03zip\x94\x93\x94\x8c\x08builtins\x94\x8c\x04iter\x94\x93\x94K\x01K\x02K\x03\x87\x94\x85\x94R\x94K\x00bh\x05K\x04K\x05\x86\x94\x85\x94R\x94K\x00b\x86\x94R\x94\x88b.', + ] + a = (1, 2, 3) + b = (4, 5) + t = [(1, 4), (2, 5)] + for protocol, dump in enumerate(pickles): + z1 = zip(a, b, strict=True) + z2 = pickle.loads(dump) + self.assertEqual(pickle.dumps(z1, protocol), dump) + self.assertEqual(type(z2), zip) + self.assertEqual(self.iter_error(z1, ValueError), t) + self.assertEqual(self.iter_error(z2, ValueError), t) + def test_zip_bad_iterable(self): exception = TypeError() @@ -1585,6 +1653,88 @@ def __iter__(self): self.assertIs(cm.exception, exception) + def test_zip_strict(self): + self.assertEqual(tuple(zip((1, 2, 3), 'abc', strict=True)), + ((1, 'a'), (2, 'b'), (3, 'c'))) + self.assertRaises(ValueError, tuple, + zip((1, 2, 3, 4), 'abc', strict=True)) + self.assertRaises(ValueError, tuple, + zip((1, 2), 'abc', strict=True)) + self.assertRaises(ValueError, tuple, + zip((1, 2), (1, 2), 'abc', strict=True)) + + def test_zip_strict_iterators(self): + x = iter(range(5)) + y = [0] + z = iter(range(5)) + self.assertRaises(ValueError, list, + (zip(x, y, z, strict=True))) + self.assertEqual(next(x), 2) + self.assertEqual(next(z), 1) + + def test_zip_strict_error_handling(self): + + class Error(Exception): + pass + + class Iter: + def __init__(self, size): + self.size = size + def __iter__(self): + return self + def __next__(self): + self.size -= 1 + if self.size < 0: + raise Error + return self.size + + l1 = self.iter_error(zip("AB", Iter(1), strict=True), Error) + self.assertEqual(l1, [("A", 0)]) + l2 = self.iter_error(zip("AB", Iter(2), "A", strict=True), ValueError) + self.assertEqual(l2, [("A", 1, "A")]) + l3 = self.iter_error(zip("AB", Iter(2), "ABC", strict=True), Error) + self.assertEqual(l3, [("A", 1, "A"), ("B", 0, "B")]) + l4 = self.iter_error(zip("AB", Iter(3), strict=True), ValueError) + self.assertEqual(l4, [("A", 2), ("B", 1)]) + l5 = self.iter_error(zip(Iter(1), "AB", strict=True), Error) + self.assertEqual(l5, [(0, "A")]) + l6 = self.iter_error(zip(Iter(2), "A", strict=True), ValueError) + self.assertEqual(l6, [(1, "A")]) + l7 = self.iter_error(zip(Iter(2), "ABC", strict=True), Error) + self.assertEqual(l7, [(1, "A"), (0, "B")]) + l8 = self.iter_error(zip(Iter(3), "AB", strict=True), ValueError) + self.assertEqual(l8, [(2, "A"), (1, "B")]) + + def 
test_zip_strict_error_handling_stopiteration(self): + + class Iter: + def __init__(self, size): + self.size = size + def __iter__(self): + return self + def __next__(self): + self.size -= 1 + if self.size < 0: + raise StopIteration + return self.size + + l1 = self.iter_error(zip("AB", Iter(1), strict=True), ValueError) + self.assertEqual(l1, [("A", 0)]) + l2 = self.iter_error(zip("AB", Iter(2), "A", strict=True), ValueError) + self.assertEqual(l2, [("A", 1, "A")]) + l3 = self.iter_error(zip("AB", Iter(2), "ABC", strict=True), ValueError) + self.assertEqual(l3, [("A", 1, "A"), ("B", 0, "B")]) + l4 = self.iter_error(zip("AB", Iter(3), strict=True), ValueError) + self.assertEqual(l4, [("A", 2), ("B", 1)]) + l5 = self.iter_error(zip(Iter(1), "AB", strict=True), ValueError) + self.assertEqual(l5, [(0, "A")]) + l6 = self.iter_error(zip(Iter(2), "A", strict=True), ValueError) + self.assertEqual(l6, [(1, "A")]) + l7 = self.iter_error(zip(Iter(2), "ABC", strict=True), ValueError) + self.assertEqual(l7, [(1, "A"), (0, "B")]) + l8 = self.iter_error(zip(Iter(3), "AB", strict=True), ValueError) + self.assertEqual(l8, [(2, "A"), (1, "B")]) + def test_format(self): # Test the basic machinery of the format() builtin. Don't test # the specifics of the various formatters diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-17-10-27-17.bpo-40636.MYaCIe.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-17-10-27-17.bpo-40636.MYaCIe.rst new file mode 100644 index 0000000000000..ba26ad9373ce3 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-17-10-27-17.bpo-40636.MYaCIe.rst @@ -0,0 +1,3 @@ +:func:`zip` now supports :pep:`618`'s ``strict`` parameter, which raises a +:exc:`ValueError` if the arguments are exhausted at different lengths. +Patch by Brandt Bucher. 
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index 65f9528084654..c6ede1cd7f6d6 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -2517,9 +2517,10 @@ builtin_issubclass_impl(PyObject *module, PyObject *cls, typedef struct { PyObject_HEAD - Py_ssize_t tuplesize; - PyObject *ittuple; /* tuple of iterators */ + Py_ssize_t tuplesize; + PyObject *ittuple; /* tuple of iterators */ PyObject *result; + int strict; } zipobject; static PyObject * @@ -2530,9 +2531,21 @@ zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) PyObject *ittuple; /* tuple of iterators */ PyObject *result; Py_ssize_t tuplesize; + int strict = 0; - if (type == &PyZip_Type && !_PyArg_NoKeywords("zip", kwds)) - return NULL; + if (kwds) { + PyObject *empty = PyTuple_New(0); + if (empty == NULL) { + return NULL; + } + static char *kwlist[] = {"strict", NULL}; + int parsed = PyArg_ParseTupleAndKeywords( + empty, kwds, "|$p:zip", kwlist, &strict); + Py_DECREF(empty); + if (!parsed) { + return NULL; + } + } /* args must be a tuple */ assert(PyTuple_Check(args)); @@ -2573,6 +2586,7 @@ zip_new(PyTypeObject *type, PyObject *args, PyObject *kwds) lz->ittuple = ittuple; lz->tuplesize = tuplesize; lz->result = result; + lz->strict = strict; return (PyObject *)lz; } @@ -2613,6 +2627,9 @@ zip_next(zipobject *lz) item = (*Py_TYPE(it)->tp_iternext)(it); if (item == NULL) { Py_DECREF(result); + if (lz->strict) { + goto check; + } return NULL; } olditem = PyTuple_GET_ITEM(result, i); @@ -2628,28 +2645,85 @@ zip_next(zipobject *lz) item = (*Py_TYPE(it)->tp_iternext)(it); if (item == NULL) { Py_DECREF(result); + if (lz->strict) { + goto check; + } return NULL; } PyTuple_SET_ITEM(result, i, item); } } return result; +check: + if (PyErr_Occurred()) { + if (!PyErr_ExceptionMatches(PyExc_StopIteration)) { + // next() on argument i raised an exception (not StopIteration) + return NULL; + } + PyErr_Clear(); + } + if (i) { + // ValueError: zip() argument 2 is shorter than argument 1 + // ValueError: zip() argument 3 is shorter than arguments 1-2 + const char* plural = i == 1 ? " " : "s 1-"; + return PyErr_Format(PyExc_ValueError, + "zip() argument %d is shorter than argument%s%d", + i + 1, plural, i); + } + for (i = 1; i < tuplesize; i++) { + it = PyTuple_GET_ITEM(lz->ittuple, i); + item = (*Py_TYPE(it)->tp_iternext)(it); + if (item) { + Py_DECREF(item); + const char* plural = i == 1 ? " " : "s 1-"; + return PyErr_Format(PyExc_ValueError, + "zip() argument %d is longer than argument%s%d", + i + 1, plural, i); + } + if (PyErr_Occurred()) { + if (!PyErr_ExceptionMatches(PyExc_StopIteration)) { + // next() on argument i raised an exception (not StopIteration) + return NULL; + } + PyErr_Clear(); + } + // Argument i is exhausted. So far so good... + } + // All arguments are exhausted. Success! 
+ return NULL; } static PyObject * zip_reduce(zipobject *lz, PyObject *Py_UNUSED(ignored)) { /* Just recreate the zip with the internal iterator tuple */ - return Py_BuildValue("OO", Py_TYPE(lz), lz->ittuple); + if (lz->strict) { + return PyTuple_Pack(3, Py_TYPE(lz), lz->ittuple, Py_True); + } + return PyTuple_Pack(2, Py_TYPE(lz), lz->ittuple); +} + +PyDoc_STRVAR(setstate_doc, "Set state information for unpickling."); + +static PyObject * +zip_setstate(zipobject *lz, PyObject *state) +{ + int strict = PyObject_IsTrue(state); + if (strict < 0) { + return NULL; + } + lz->strict = strict; + Py_RETURN_NONE; } static PyMethodDef zip_methods[] = { {"__reduce__", (PyCFunction)zip_reduce, METH_NOARGS, reduce_doc}, - {NULL, NULL} /* sentinel */ + {"__setstate__", (PyCFunction)zip_setstate, METH_O, setstate_doc}, + {NULL} /* sentinel */ }; PyDoc_STRVAR(zip_doc, -"zip(*iterables) --> A zip object yielding tuples until an input is exhausted.\n\ +"zip(*iterables, strict=False) --> Yield tuples until an input is exhausted.\n\ \n\ >>> list(zip('abcdefg', range(3), range(4)))\n\ [('a', 0, 0), ('b', 1, 1), ('c', 2, 2)]\n\ @@ -2657,7 +2731,10 @@ PyDoc_STRVAR(zip_doc, The zip object yields n-length tuples, where n is the number of iterables\n\ passed as positional arguments to zip(). The i-th element in every tuple\n\ comes from the i-th iterable argument to zip(). This continues until the\n\ -shortest argument is exhausted."); +shortest argument is exhausted.\n\ +\n\ +If strict is true and one of the arguments is exhausted before the others,\n\ +raise a ValueError."); PyTypeObject PyZip_Type = { PyVarObject_HEAD_INIT(&PyType_Type, 0) From webhook-mailer at python.org Fri Jun 19 08:16:02 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 19 Jun 2020 12:16:02 -0000 Subject: [Python-checkins] Improve readability of `formataddr` docstring (GH-20963) Message-ID: https://github.com/python/cpython/commit/4dd10ed6942ab99bc5b0b1c5ecc88a753877aefd commit: 4dd10ed6942ab99bc5b0b1c5ecc88a753877aefd branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-19T05:15:57-07:00 summary: Improve readability of `formataddr` docstring (GH-20963) For me as a non native English speaker, the sentence with its embedded clause was very hard to understand. modified: Lib/email/utils.py Automerge-Triggered-By: @csabella (cherry picked from commit 66a65ba43cb3e68a43e32469c988dd7a6cff049c) Co-authored-by: J?rgen Gmach files: M Lib/email/utils.py diff --git a/Lib/email/utils.py b/Lib/email/utils.py index 858f620e25bfb..07dd029cc0280 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -81,7 +81,7 @@ def formataddr(pair, charset='utf-8'): If the first element of pair is false, then the second element is returned unmodified. - Optional charset if given is the character set that is used to encode + The optional charset is the character set that is used to encode realname in case realname is not ASCII safe. Can be an instance of str or a Charset-like object which has a header_encode method. Default is 'utf-8'. 
From webhook-mailer at python.org Fri Jun 19 10:11:10 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Fri, 19 Jun 2020 14:11:10 -0000 Subject: [Python-checkins] Improve blake2 comment for Victor (GH-20981) Message-ID: https://github.com/python/cpython/commit/8a0fe7b4544ba28eeea6e16ddb646bb0b5d2918e commit: 8a0fe7b4544ba28eeea6e16ddb646bb0b5d2918e branch: master author: Christian Heimes committer: GitHub date: 2020-06-19T07:11:02-07:00 summary: Improve blake2 comment for Victor (GH-20981) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran files: M Lib/hashlib.py diff --git a/Lib/hashlib.py b/Lib/hashlib.py index 1b6e50247c181..58c340d56e3ba 100644 --- a/Lib/hashlib.py +++ b/Lib/hashlib.py @@ -70,6 +70,11 @@ __builtin_constructor_cache = {} +# Prefer our blake2 implementation +# OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. The OpenSSL +# implementations neither support keyed blake2 (blake2 MAC) nor advanced +# features like salt, personalization, or tree hashing. OpenSSL hash-only +# variants are available as 'blake2b512' and 'blake2s256', though. __block_openssl_constructor = { 'blake2b', 'blake2s', } @@ -120,7 +125,7 @@ def __get_builtin_constructor(name): def __get_openssl_constructor(name): if name in __block_openssl_constructor: - # Prefer our blake2 and sha3 implementation. + # Prefer our builtin blake2 implementation. return __get_builtin_constructor(name) try: # MD5, SHA1, and SHA2 are in all supported OpenSSL versions @@ -149,10 +154,7 @@ def __hash_new(name, data=b'', **kwargs): optionally initialized with data (which must be a bytes-like object). """ if name in __block_openssl_constructor: - # Prefer our blake2 and sha3 implementation - # OpenSSL 1.1.0 comes with a limited implementation of blake2b/s. - # It does neither support keyed blake2 nor advanced features like - # salt, personal, tree hashing or SSE. + # Prefer our builtin blake2 implementation. return __get_builtin_constructor(name)(data, **kwargs) try: return _hashlib.new(name, data, **kwargs) From webhook-mailer at python.org Fri Jun 19 11:56:18 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Fri, 19 Jun 2020 15:56:18 -0000 Subject: [Python-checkins] bpo-40077: Convert _bz2 module to use PyType_FromSpec (GH-20960) Message-ID: https://github.com/python/cpython/commit/ec689187957cc80af56b9a63251bbc295bafd781 commit: ec689187957cc80af56b9a63251bbc295bafd781 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-20T00:56:13+09:00 summary: bpo-40077: Convert _bz2 module to use PyType_FromSpec (GH-20960) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-18-19-04-30.bpo-40077._yI-ax.rst M Modules/_bz2module.c M Modules/clinic/_bz2module.c.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-18-19-04-30.bpo-40077._yI-ax.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-18-19-04-30.bpo-40077._yI-ax.rst new file mode 100644 index 0000000000000..2e0258a7b369d --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-18-19-04-30.bpo-40077._yI-ax.rst @@ -0,0 +1 @@ +Convert :mod:`_bz2` to use :c:func:`PyType_FromSpec`. 
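As a rough Python-level sketch of what the converted module looks like from user code (assuming an interpreter built with the patch below; the exact exception wording comes from the new __reduce__ slot added to _bz2module.c):

    import _bz2
    import pickle

    c = _bz2.BZ2Compressor(9)              # compresslevel between 1 and 9, unchanged
    data = c.compress(b'hello world') + c.flush()

    d = _bz2.BZ2Decompressor()
    assert d.decompress(data) == b'hello world'

    try:
        pickle.dumps(c)                    # the new __reduce__ refuses pickling
    except TypeError as exc:
        print(exc)                         # e.g. "cannot pickle _bz2.BZ2Compressor object"
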
diff --git a/Modules/_bz2module.c b/Modules/_bz2module.c index 880632c62349f..effb0de2e6536 100644 --- a/Modules/_bz2module.c +++ b/Modules/_bz2module.c @@ -28,6 +28,19 @@ #define RELEASE_LOCK(obj) PyThread_release_lock((obj)->lock) +typedef struct { + PyTypeObject *bz2_compressor_type; + PyTypeObject *bz2_decompressor_type; +} _bz2_state; + +static inline _bz2_state* +get_bz2_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (_bz2_state *)state; +} + typedef struct { PyObject_HEAD bz_stream bzs; @@ -51,9 +64,6 @@ typedef struct { PyThread_type_lock lock; } BZ2Decompressor; -static PyTypeObject BZ2Compressor_Type; -static PyTypeObject BZ2Decompressor_Type; - /* Helper functions. */ static int @@ -262,6 +272,21 @@ _bz2_BZ2Compressor_flush_impl(BZ2Compressor *self) return result; } +/*[clinic input] +_bz2.BZ2Compressor.__reduce__ + +[clinic start generated code]*/ + +static PyObject * +_bz2_BZ2Compressor___reduce___impl(BZ2Compressor *self) +/*[clinic end generated code: output=d13db66ae043e141 input=e09bccef0e6731b2]*/ +{ + PyErr_Format(PyExc_TypeError, + "cannot pickle %s object", + Py_TYPE(self)->tp_name); + return NULL; +} + static void* BZ2_Malloc(void* ctx, int items, int size) { @@ -280,21 +305,11 @@ BZ2_Free(void* ctx, void *ptr) PyMem_RawFree(ptr); } -/*[clinic input] -_bz2.BZ2Compressor.__init__ - - compresslevel: int = 9 - Compression level, as a number between 1 and 9. - / - -Create a compressor object for compressing data incrementally. - -For one-shot compression, use the compress() function instead. -[clinic start generated code]*/ +/* Argument Clinic is not used since the Argument Clinic always want to + check the type which would be wrong here */ static int _bz2_BZ2Compressor___init___impl(BZ2Compressor *self, int compresslevel) -/*[clinic end generated code: output=c4e6adfd02963827 input=4e1ff7b8394b6e9a]*/ { int bzerror; @@ -325,63 +340,89 @@ _bz2_BZ2Compressor___init___impl(BZ2Compressor *self, int compresslevel) return -1; } +PyDoc_STRVAR(_bz2_BZ2Compressor___init____doc__, +"BZ2Compressor(compresslevel=9, /)\n" +"--\n" +"\n" +"Create a compressor object for compressing data incrementally.\n" +"\n" +" compresslevel\n" +" Compression level, as a number between 1 and 9.\n" +"\n" +"For one-shot compression, use the compress() function instead."); + +static int +_bz2_BZ2Compressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) +{ + int return_value = -1; + int compresslevel = 9; + + if (!_PyArg_NoKeywords("BZ2Compressor", kwargs)) { + goto exit; + } + if (!_PyArg_CheckPositional("BZ2Compressor", PyTuple_GET_SIZE(args), 0, 1)) { + goto exit; + } + if (PyTuple_GET_SIZE(args) < 1) { + goto skip_optional; + } + compresslevel = _PyLong_AsInt(PyTuple_GET_ITEM(args, 0)); + if (compresslevel == -1 && PyErr_Occurred()) { + goto exit; + } +skip_optional: + return_value = _bz2_BZ2Compressor___init___impl((BZ2Compressor *)self, compresslevel); + +exit: + return return_value; +} + static void BZ2Compressor_dealloc(BZ2Compressor *self) { BZ2_bzCompressEnd(&self->bzs); - if (self->lock != NULL) + if (self->lock != NULL) { PyThread_free_lock(self->lock); - Py_TYPE(self)->tp_free((PyObject *)self); + } + PyTypeObject *tp = Py_TYPE(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +static int +BZ2Compressor_traverse(BZ2Compressor *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; } static PyMethodDef BZ2Compressor_methods[] = { _BZ2_BZ2COMPRESSOR_COMPRESS_METHODDEF 
_BZ2_BZ2COMPRESSOR_FLUSH_METHODDEF + _BZ2_BZ2COMPRESSOR___REDUCE___METHODDEF {NULL} }; - -static PyTypeObject BZ2Compressor_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_bz2.BZ2Compressor", /* tp_name */ - sizeof(BZ2Compressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)BZ2Compressor_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - _bz2_BZ2Compressor___init____doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - BZ2Compressor_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - _bz2_BZ2Compressor___init__, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ +static PyType_Slot bz2_compressor_type_slots[] = { + {Py_tp_dealloc, BZ2Compressor_dealloc}, + {Py_tp_methods, BZ2Compressor_methods}, + {Py_tp_init, _bz2_BZ2Compressor___init__}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_doc, (char *)_bz2_BZ2Compressor___init____doc__}, + {Py_tp_traverse, BZ2Compressor_traverse}, + {0, 0} }; +static PyType_Spec bz2_compressor_type_spec = { + .name = "_bz2.BZ2Compressor", + .basicsize = sizeof(BZ2Compressor), + // Calling PyType_GetModuleState() on a subclass is not safe. + // bz2_compressor_type_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = bz2_compressor_type_slots, +}; /* BZ2Decompressor class. */ @@ -602,16 +643,24 @@ _bz2_BZ2Decompressor_decompress_impl(BZ2Decompressor *self, Py_buffer *data, } /*[clinic input] -_bz2.BZ2Decompressor.__init__ +_bz2.BZ2Decompressor.__reduce__ -Create a decompressor object for decompressing data incrementally. - -For one-shot decompression, use the decompress() function instead. 
[clinic start generated code]*/ +static PyObject * +_bz2_BZ2Decompressor___reduce___impl(BZ2Decompressor *self) +/*[clinic end generated code: output=f6a40650813f482e input=8db9175a609fdd43]*/ +{ + PyErr_Format(PyExc_TypeError, + "cannot pickle %s object", + Py_TYPE(self)->tp_name); + return NULL; +} + +/* Argument Clinic is not used since the Argument Clinic always want to + check the type which would be wrong here */ static int _bz2_BZ2Decompressor___init___impl(BZ2Decompressor *self) -/*[clinic end generated code: output=e4d2b9bb866ab8f1 input=95f6500dcda60088]*/ { int bzerror; @@ -646,20 +695,58 @@ _bz2_BZ2Decompressor___init___impl(BZ2Decompressor *self) return -1; } +static int +_bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) +{ + int return_value = -1; + + if (!_PyArg_NoPositional("BZ2Decompressor", args)) { + goto exit; + } + if (!_PyArg_NoKeywords("BZ2Decompressor", kwargs)) { + goto exit; + } + return_value = _bz2_BZ2Decompressor___init___impl((BZ2Decompressor *)self); + +exit: + return return_value; +} + +PyDoc_STRVAR(_bz2_BZ2Decompressor___init____doc__, +"BZ2Decompressor()\n" +"--\n" +"\n" +"Create a decompressor object for decompressing data incrementally.\n" +"\n" +"For one-shot decompression, use the decompress() function instead."); + static void BZ2Decompressor_dealloc(BZ2Decompressor *self) { - if(self->input_buffer != NULL) + if(self->input_buffer != NULL) { PyMem_Free(self->input_buffer); + } BZ2_bzDecompressEnd(&self->bzs); Py_CLEAR(self->unused_data); - if (self->lock != NULL) + if (self->lock != NULL) { PyThread_free_lock(self->lock); - Py_TYPE(self)->tp_free((PyObject *)self); + } + + PyTypeObject *tp = Py_TYPE(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +static int +BZ2Decompressor_traverse(BZ2Decompressor *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; } static PyMethodDef BZ2Decompressor_methods[] = { _BZ2_BZ2DECOMPRESSOR_DECOMPRESS_METHODDEF + _BZ2_BZ2DECOMPRESSOR___REDUCE___METHODDEF {NULL} }; @@ -682,64 +769,81 @@ static PyMemberDef BZ2Decompressor_members[] = { {NULL} }; -static PyTypeObject BZ2Decompressor_Type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_bz2.BZ2Decompressor", /* tp_name */ - sizeof(BZ2Decompressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)BZ2Decompressor_dealloc,/* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - _bz2_BZ2Decompressor___init____doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - BZ2Decompressor_methods, /* tp_methods */ - BZ2Decompressor_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - _bz2_BZ2Decompressor___init__, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ +static PyType_Slot bz2_decompressor_type_slots[] = { + {Py_tp_dealloc, BZ2Decompressor_dealloc}, + {Py_tp_methods, BZ2Decompressor_methods}, + {Py_tp_init, _bz2_BZ2Decompressor___init__}, + {Py_tp_doc, (char *)_bz2_BZ2Decompressor___init____doc__}, + {Py_tp_members, BZ2Decompressor_members}, + {Py_tp_new, 
PyType_GenericNew}, + {Py_tp_traverse, BZ2Decompressor_traverse}, + {0, 0} }; +static PyType_Spec bz2_decompressor_type_spec = { + .name = "_bz2.BZ2Decompressor", + .basicsize = sizeof(BZ2Decompressor), + // Calling PyType_GetModuleState() on a subclass is not safe. + // bz2_decompressor_type_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = bz2_decompressor_type_slots, +}; /* Module initialization. */ static int _bz2_exec(PyObject *module) { - if (PyModule_AddType(module, &BZ2Compressor_Type) < 0) { + _bz2_state *state = get_bz2_state(module); + state->bz2_compressor_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &bz2_compressor_type_spec, NULL); + if (state->bz2_compressor_type == NULL) { + return -1; + } + + if (PyModule_AddType(module, state->bz2_compressor_type) < 0) { return -1; } - if (PyModule_AddType(module, &BZ2Decompressor_Type) < 0) { + state->bz2_decompressor_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &bz2_decompressor_type_spec, NULL); + if (state->bz2_decompressor_type == NULL) { + return -1; + } + + if (PyModule_AddType(module, state->bz2_decompressor_type) < 0) { return -1; } return 0; } +static int +_bz2_traverse(PyObject *module, visitproc visit, void *arg) +{ + _bz2_state *state = get_bz2_state(module); + Py_VISIT(state->bz2_compressor_type); + Py_VISIT(state->bz2_decompressor_type); + return 0; +} + +static int +_bz2_clear(PyObject *module) +{ + _bz2_state *state = get_bz2_state(module); + Py_CLEAR(state->bz2_compressor_type); + Py_CLEAR(state->bz2_decompressor_type); + return 0; +} + +static void +_bz2_free(void *module) +{ + _bz2_clear((PyObject *)module); +} + static struct PyModuleDef_Slot _bz2_slots[] = { {Py_mod_exec, _bz2_exec}, {0, NULL} @@ -747,14 +851,12 @@ static struct PyModuleDef_Slot _bz2_slots[] = { static struct PyModuleDef _bz2module = { PyModuleDef_HEAD_INIT, - "_bz2", - NULL, - 0, - NULL, - _bz2_slots, - NULL, - NULL, - NULL + .m_name = "_bz2", + .m_size = sizeof(_bz2_state), + .m_slots = _bz2_slots, + .m_traverse = _bz2_traverse, + .m_clear = _bz2_clear, + .m_free = _bz2_free, }; PyMODINIT_FUNC diff --git a/Modules/clinic/_bz2module.c.h b/Modules/clinic/_bz2module.c.h index 466020787449e..ff67d34155dfd 100644 --- a/Modules/clinic/_bz2module.c.h +++ b/Modules/clinic/_bz2module.c.h @@ -65,45 +65,21 @@ _bz2_BZ2Compressor_flush(BZ2Compressor *self, PyObject *Py_UNUSED(ignored)) return _bz2_BZ2Compressor_flush_impl(self); } -PyDoc_STRVAR(_bz2_BZ2Compressor___init____doc__, -"BZ2Compressor(compresslevel=9, /)\n" +PyDoc_STRVAR(_bz2_BZ2Compressor___reduce____doc__, +"__reduce__($self, /)\n" "--\n" -"\n" -"Create a compressor object for compressing data incrementally.\n" -"\n" -" compresslevel\n" -" Compression level, as a number between 1 and 9.\n" -"\n" -"For one-shot compression, use the compress() function instead."); - -static int -_bz2_BZ2Compressor___init___impl(BZ2Compressor *self, int compresslevel); +"\n"); -static int -_bz2_BZ2Compressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) -{ - int return_value = -1; - int compresslevel = 9; +#define _BZ2_BZ2COMPRESSOR___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)_bz2_BZ2Compressor___reduce__, METH_NOARGS, _bz2_BZ2Compressor___reduce____doc__}, - if (Py_IS_TYPE(self, &BZ2Compressor_Type) && - !_PyArg_NoKeywords("BZ2Compressor", kwargs)) { - goto exit; - } - if (!_PyArg_CheckPositional("BZ2Compressor", 
PyTuple_GET_SIZE(args), 0, 1)) { - goto exit; - } - if (PyTuple_GET_SIZE(args) < 1) { - goto skip_optional; - } - compresslevel = _PyLong_AsInt(PyTuple_GET_ITEM(args, 0)); - if (compresslevel == -1 && PyErr_Occurred()) { - goto exit; - } -skip_optional: - return_value = _bz2_BZ2Compressor___init___impl((BZ2Compressor *)self, compresslevel); +static PyObject * +_bz2_BZ2Compressor___reduce___impl(BZ2Compressor *self); -exit: - return return_value; +static PyObject * +_bz2_BZ2Compressor___reduce__(BZ2Compressor *self, PyObject *Py_UNUSED(ignored)) +{ + return _bz2_BZ2Compressor___reduce___impl(self); } PyDoc_STRVAR(_bz2_BZ2Decompressor_decompress__doc__, @@ -181,33 +157,20 @@ _bz2_BZ2Decompressor_decompress(BZ2Decompressor *self, PyObject *const *args, Py return return_value; } -PyDoc_STRVAR(_bz2_BZ2Decompressor___init____doc__, -"BZ2Decompressor()\n" +PyDoc_STRVAR(_bz2_BZ2Decompressor___reduce____doc__, +"__reduce__($self, /)\n" "--\n" -"\n" -"Create a decompressor object for decompressing data incrementally.\n" -"\n" -"For one-shot decompression, use the decompress() function instead."); +"\n"); -static int -_bz2_BZ2Decompressor___init___impl(BZ2Decompressor *self); +#define _BZ2_BZ2DECOMPRESSOR___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)_bz2_BZ2Decompressor___reduce__, METH_NOARGS, _bz2_BZ2Decompressor___reduce____doc__}, -static int -_bz2_BZ2Decompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs) -{ - int return_value = -1; - - if (Py_IS_TYPE(self, &BZ2Decompressor_Type) && - !_PyArg_NoPositional("BZ2Decompressor", args)) { - goto exit; - } - if (Py_IS_TYPE(self, &BZ2Decompressor_Type) && - !_PyArg_NoKeywords("BZ2Decompressor", kwargs)) { - goto exit; - } - return_value = _bz2_BZ2Decompressor___init___impl((BZ2Decompressor *)self); +static PyObject * +_bz2_BZ2Decompressor___reduce___impl(BZ2Decompressor *self); -exit: - return return_value; +static PyObject * +_bz2_BZ2Decompressor___reduce__(BZ2Decompressor *self, PyObject *Py_UNUSED(ignored)) +{ + return _bz2_BZ2Decompressor___reduce___impl(self); } -/*[clinic end generated code: output=b49102ee26928a28 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=001f31fdacb4cb01 input=a9049054013a1b77]*/ From webhook-mailer at python.org Fri Jun 19 12:01:26 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Fri, 19 Jun 2020 16:01:26 -0000 Subject: [Python-checkins] bpo-38377: Fix skip_if_broken_multiprocessing_synchronize() on macOS (GH-20984) Message-ID: https://github.com/python/cpython/commit/3358da4054b9b0b045eb47dc74dee3d58bfbb1d5 commit: 3358da4054b9b0b045eb47dc74dee3d58bfbb1d5 branch: master author: Victor Stinner committer: GitHub date: 2020-06-19T18:01:20+02:00 summary: bpo-38377: Fix skip_if_broken_multiprocessing_synchronize() on macOS (GH-20984) skip_if_broken_multiprocessing_synchronize() only attempts for create a semaphore on Linux to fix multiprocessing test_resource_tracker_reused() on macOS. files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index d9dbdc13008dc..bceb8cda20c35 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1962,7 +1962,7 @@ def skip_if_broken_multiprocessing_synchronize(): """ Skip tests if the multiprocessing.synchronize module is missing, if there is no available semaphore implementation, or if creating a lock raises an - OSError. + OSError (on Linux only). """ # Skip tests if the _multiprocessing extension is missing. 
@@ -1972,10 +1972,11 @@ def skip_if_broken_multiprocessing_synchronize(): # multiprocessing.synchronize requires _multiprocessing.SemLock. synchronize = import_module('multiprocessing.synchronize') - try: - # bpo-38377: On Linux, creating a semaphore is the current user - # does not have the permission to create a file in /dev/shm. - # Create a semaphore to check permissions. - synchronize.Lock(ctx=None) - except OSError as exc: - raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") + if sys.platform == "linux": + try: + # bpo-38377: On Linux, creating a semaphore fails with OSError + # if the current user does not have the permission to create + # a file in /dev/shm/ directory. + synchronize.Lock(ctx=None) + except OSError as exc: + raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") From webhook-mailer at python.org Fri Jun 19 12:19:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 19 Jun 2020 16:19:46 -0000 Subject: [Python-checkins] bpo-38377: Fix skip_if_broken_multiprocessing_synchronize() on macOS (GH-20984) Message-ID: https://github.com/python/cpython/commit/ec9bc2da421c456e416d991fd1fe79ac33344d9d commit: ec9bc2da421c456e416d991fd1fe79ac33344d9d branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-19T09:19:38-07:00 summary: bpo-38377: Fix skip_if_broken_multiprocessing_synchronize() on macOS (GH-20984) skip_if_broken_multiprocessing_synchronize() only attempts for create a semaphore on Linux to fix multiprocessing test_resource_tracker_reused() on macOS. (cherry picked from commit 3358da4054b9b0b045eb47dc74dee3d58bfbb1d5) Co-authored-by: Victor Stinner files: M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index b75dbd214fe36..3d287a98ac22d 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -3383,7 +3383,7 @@ def skip_if_broken_multiprocessing_synchronize(): """ Skip tests if the multiprocessing.synchronize module is missing, if there is no available semaphore implementation, or if creating a lock raises an - OSError. + OSError (on Linux only). """ # Skip tests if the _multiprocessing extension is missing. @@ -3393,10 +3393,11 @@ def skip_if_broken_multiprocessing_synchronize(): # multiprocessing.synchronize requires _multiprocessing.SemLock. synchronize = import_module('multiprocessing.synchronize') - try: - # bpo-38377: On Linux, creating a semaphore is the current user - # does not have the permission to create a file in /dev/shm. - # Create a semaphore to check permissions. - synchronize.Lock(ctx=None) - except OSError as exc: - raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") + if sys.platform == "linux": + try: + # bpo-38377: On Linux, creating a semaphore fails with OSError + # if the current user does not have the permission to create + # a file in /dev/shm/ directory. 
+ synchronize.Lock(ctx=None) + except OSError as exc: + raise unittest.SkipTest(f"broken multiprocessing SemLock: {exc!r}") From webhook-mailer at python.org Fri Jun 19 16:39:31 2020 From: webhook-mailer at python.org (Ram Rachum) Date: Fri, 19 Jun 2020 20:39:31 -0000 Subject: [Python-checkins] bpo-40636: Documentation for zip-strict (#20961) Message-ID: https://github.com/python/cpython/commit/59cf853332a82ce92875ea3dd6bba08e1305a288 commit: 59cf853332a82ce92875ea3dd6bba08e1305a288 branch: master author: Ram Rachum committer: GitHub date: 2020-06-19T13:39:22-07:00 summary: bpo-40636: Documentation for zip-strict (#20961) files: M Doc/library/functions.rst M Doc/whatsnew/3.10.rst diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index e9c92f7c8210d..0577de6fbfeeb 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1720,50 +1720,90 @@ are always available. They are listed here in alphabetical order. dictionary are ignored. -.. function:: zip(*iterables) - - Make an iterator that aggregates elements from each of the iterables. - - Returns an iterator of tuples, where the *i*-th tuple contains - the *i*-th element from each of the argument sequences or iterables. The - iterator stops when the shortest input iterable is exhausted. With a single - iterable argument, it returns an iterator of 1-tuples. With no arguments, - it returns an empty iterator. Equivalent to:: - - def zip(*iterables): - # zip('ABCD', 'xy') --> Ax By - sentinel = object() - iterators = [iter(it) for it in iterables] - while iterators: - result = [] - for it in iterators: - elem = next(it, sentinel) - if elem is sentinel: - return - result.append(elem) - yield tuple(result) - - The left-to-right evaluation order of the iterables is guaranteed. This - makes possible an idiom for clustering a data series into n-length groups - using ``zip(*[iter(s)]*n)``. This repeats the *same* iterator ``n`` times - so that each output tuple has the result of ``n`` calls to the iterator. - This has the effect of dividing the input into n-length chunks. - - :func:`zip` should only be used with unequal length inputs when you don't - care about trailing, unmatched values from the longer iterables. If those - values are important, use :func:`itertools.zip_longest` instead. - - :func:`zip` in conjunction with the ``*`` operator can be used to unzip a - list:: - - >>> x = [1, 2, 3] - >>> y = [4, 5, 6] - >>> zipped = zip(x, y) - >>> list(zipped) - [(1, 4), (2, 5), (3, 6)] - >>> x2, y2 = zip(*zip(x, y)) - >>> x == list(x2) and y == list(y2) - True +.. function:: zip(*iterables, strict=False) + + Iterate over several iterables in parallel, producing tuples with an item + from each one. + + Example:: + + >>> for item in zip([1, 2, 3], ['sugar', 'spice', 'everything nice']): + ... print(item) + ... + (1, 'sugar') + (2, 'spice') + (3, 'everything nice') + + More formally: :func:`zip` returns an iterator of tuples, where the *i*-th + tuple contains the *i*-th element from each of the argument iterables. + + Another way to think of :func:`zip` is that it turns rows into columns, and + columns into rows. This is similar to `transposing a matrix + `_. + + :func:`zip` is lazy: The elements won't be processed until the iterable is + iterated on, e.g. by a :keyword:`!for` loop or by wrapping in a + :class:`list`. + + One thing to consider is that the iterables passed to :func:`zip` could have + different lengths; sometimes by design, and sometimes because of a bug in + the code that prepared these iterables. 
Python offers three different + approaches to dealing with this issue: + + * By default, :func:`zip` stops when the shortest iterable is exhausted. + It will ignore the remaining items in the longer iterables, cutting off + the result to the length of the shortest iterable:: + + >>> list(zip(range(3), ['fee', 'fi', 'fo', 'fum'])) + [(0, 'fee'), (1, 'fi'), (2, 'fo')] + + * :func:`zip` is often used in cases where the iterables are assumed to be + of equal length. In such cases, it's recommended to use the ``strict=True`` + option. Its output is the same as regular :func:`zip`:: + + >>> list(zip(('a', 'b', 'c'), (1, 2, 3), strict=True)) + [('a', 1), ('b', 2), ('c', 3)] + + Unlike the default behavior, it checks that the lengths of iterables are + identical, raising a :exc:`ValueError` if they aren't: + + >>> list(zip(range(3), ['fee', 'fi', 'fo', 'fum'], strict=True)) + Traceback (most recent call last): + ... + ValueError: zip() argument 2 is longer than argument 1 + + Without the ``strict=True`` argument, any bug that results in iterables of + different lengths will be silenced, possibly mainfesting as a hard-to-find + bug in another part of the program. + + * Shorter iterables can be padded with a constant value to make all the + iterables have the same length. This is done by + :func:`itertools.zip_longest`. + + Edge cases: With a single iterable argument, :func:`zip` returns an + iterator of 1-tuples. With no arguments, it returns an empty iterator. + + Tips and tricks: + + * The left-to-right evaluation order of the iterables is guaranteed. This + makes possible an idiom for clustering a data series into n-length groups + using ``zip(*[iter(s)]*n, strict=True)``. This repeats the *same* iterator + ``n`` times so that each output tuple has the result of ``n`` calls to the + iterator. This has the effect of dividing the input into n-length chunks. + + * :func:`zip` in conjunction with the ``*`` operator can be used to unzip a + list:: + + >>> x = [1, 2, 3] + >>> y = [4, 5, 6] + >>> list(zip(x, y)) + [(1, 4), (2, 5), (3, 6)] + >>> x2, y2 = zip(*zip(x, y)) + >>> x == list(x2) and y == list(y2) + True + + .. versionchanged:: 3.10 + Added the ``strict`` argument. .. function:: __import__(name, globals=None, locals=None, fromlist=(), level=0) diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 9c1dca1152a64..89958450200f9 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -79,6 +79,9 @@ New Features :class:`types.MappingProxyType` object wrapping the original dictionary. (Contributed by Dennis Sweeney in :issue:`40890`.) +* :pep:`618`: The :func:`zip` function now has an optional ``strict`` flag, used + to require that all the iterables have an equal length. + Other Language Changes ====================== From webhook-mailer at python.org Fri Jun 19 17:06:14 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Fri, 19 Jun 2020 21:06:14 -0000 Subject: [Python-checkins] bpo-41040: Fix test_modulefinder. (GH-20991) Message-ID: https://github.com/python/cpython/commit/a041e116db5f1e78222cbf2c22aae96457372680 commit: a041e116db5f1e78222cbf2c22aae96457372680 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-20T00:06:07+03:00 summary: bpo-41040: Fix test_modulefinder. 
(GH-20991) files: M Lib/test/test_modulefinder.py diff --git a/Lib/test/test_modulefinder.py b/Lib/test/test_modulefinder.py index 23c7e5fb0f563..ca1058b8d4087 100644 --- a/Lib/test/test_modulefinder.py +++ b/Lib/test/test_modulefinder.py @@ -284,10 +284,11 @@ def foo(): pass # 0xe2 is not allowed in utf8 print('CP1252 test P\xe2t\xe9') import b_utf8 +""" + """\ b_utf8.py # use the default of utf8 print('Unicode test A code point 2090 \u2090 that is not valid in cp1252') -"""] +""".encode('utf-8')] def open_file(path): dirname = os.path.dirname(path) From webhook-mailer at python.org Fri Jun 19 17:25:33 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Fri, 19 Jun 2020 21:25:33 -0000 Subject: [Python-checkins] bpo-41040: Fix test_modulefinder. (GH-20991) Message-ID: https://github.com/python/cpython/commit/80651ab9e3f9d9a89e566a0b87a51b28720094f0 commit: 80651ab9e3f9d9a89e566a0b87a51b28720094f0 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-19T14:25:25-07:00 summary: bpo-41040: Fix test_modulefinder. (GH-20991) (cherry picked from commit a041e116db5f1e78222cbf2c22aae96457372680) Co-authored-by: Serhiy Storchaka files: M Lib/test/test_modulefinder.py diff --git a/Lib/test/test_modulefinder.py b/Lib/test/test_modulefinder.py index 23c7e5fb0f563..ca1058b8d4087 100644 --- a/Lib/test/test_modulefinder.py +++ b/Lib/test/test_modulefinder.py @@ -284,10 +284,11 @@ def foo(): pass # 0xe2 is not allowed in utf8 print('CP1252 test P\xe2t\xe9') import b_utf8 +""" + """\ b_utf8.py # use the default of utf8 print('Unicode test A code point 2090 \u2090 that is not valid in cp1252') -"""] +""".encode('utf-8')] def open_file(path): dirname = os.path.dirname(path) From webhook-mailer at python.org Sat Jun 20 02:27:10 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Sat, 20 Jun 2020 06:27:10 -0000 Subject: [Python-checkins] bpo-39503: CVE-2020-8492: Fix AbstractBasicAuthHandler (GH-18284) (#19305) Message-ID: https://github.com/python/cpython/commit/37fe316479e0b6906a74b0c0a5e495c55037fdfd commit: 37fe316479e0b6906a74b0c0a5e495c55037fdfd branch: 3.5 author: Victor Stinner committer: GitHub date: 2020-06-19T23:26:58-07:00 summary: bpo-39503: CVE-2020-8492: Fix AbstractBasicAuthHandler (GH-18284) (#19305) The AbstractBasicAuthHandler class of the urllib.request module uses an inefficient regular expression which can be exploited by an attacker to cause a denial of service. Fix the regex to prevent the catastrophic backtracking. Vulnerability reported by Ben Caller and Matt Schwager. AbstractBasicAuthHandler of urllib.request now parses all WWW-Authenticate HTTP headers and accepts multiple challenges per header: use the realm of the first Basic challenge. 
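The patch below replaces the old '(?:.*,)*' prefix, whose nested repetition allowed catastrophic backtracking, with a pattern anchored at the start of the string or at a ',' separator, and scans headers with finditer() so that several challenges in one header are handled. A small sketch of the new pattern applied to a made-up header value:

    import re

    # Pattern as added to urllib.request.AbstractBasicAuthHandler by this patch.
    rx = re.compile('(?:^|,)'           # start of the string or ','
                    '[ \t]*'            # optional whitespaces
                    '([^ \t]+)'         # scheme like "Basic"
                    '[ \t]+'            # mandatory whitespaces
                    'realm=(["\']?)([^"\']*)\\2',
                    re.I)

    header = 'Basic realm="one", Digest realm="two", qop="auth"'
    for scheme, _quote, realm in rx.findall(header):
        print(scheme, realm)
    # Basic one
    # Digest two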
files: A Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst A Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst M Lib/test/test_urllib2.py M Lib/urllib/request.py diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py index 3ed81ce510511..8e5b68e68aa95 100644 --- a/Lib/test/test_urllib2.py +++ b/Lib/test/test_urllib2.py @@ -1361,40 +1361,64 @@ def test_osx_proxy_bypass(self): bypass = {'exclude_simple': True, 'exceptions': []} self.assertTrue(_proxy_bypass_macosx_sysconf('test', bypass)) - def test_basic_auth(self, quote_char='"'): - opener = OpenerDirector() - password_manager = MockPasswordManager() - auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager) - realm = "ACME Widget Store" - http_handler = MockHTTPHandler( - 401, 'WWW-Authenticate: Basic realm=%s%s%s\r\n\r\n' % - (quote_char, realm, quote_char)) - opener.add_handler(auth_handler) - opener.add_handler(http_handler) - self._test_basic_auth(opener, auth_handler, "Authorization", - realm, http_handler, password_manager, - "http://acme.example.com/protected", - "http://acme.example.com/protected", - ) - - def test_basic_auth_with_single_quoted_realm(self): - self.test_basic_auth(quote_char="'") - - def test_basic_auth_with_unquoted_realm(self): - opener = OpenerDirector() - password_manager = MockPasswordManager() - auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager) - realm = "ACME Widget Store" - http_handler = MockHTTPHandler( - 401, 'WWW-Authenticate: Basic realm=%s\r\n\r\n' % realm) - opener.add_handler(auth_handler) - opener.add_handler(http_handler) - with self.assertWarns(UserWarning): + def check_basic_auth(self, headers, realm): + with self.subTest(realm=realm, headers=headers): + opener = OpenerDirector() + password_manager = MockPasswordManager() + auth_handler = urllib.request.HTTPBasicAuthHandler(password_manager) + body = '\r\n'.join(headers) + '\r\n\r\n' + http_handler = MockHTTPHandler(401, body) + opener.add_handler(auth_handler) + opener.add_handler(http_handler) self._test_basic_auth(opener, auth_handler, "Authorization", - realm, http_handler, password_manager, - "http://acme.example.com/protected", - "http://acme.example.com/protected", - ) + realm, http_handler, password_manager, + "http://acme.example.com/protected", + "http://acme.example.com/protected") + + def test_basic_auth(self): + realm = "realm2 at example.com" + realm2 = "realm2 at example.com" + basic = 'Basic realm="{}"'.format(realm) + basic2 = 'Basic realm="{}"'.format(realm2) + other_no_realm = 'Otherscheme xxx' + digest = ('Digest realm="{}", ' + 'qop="auth, auth-int", ' + 'nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", ' + 'opaque="5ccc069c403ebaf9f0171e9517f40e41"').format(realm2) + for realm_str in ( + # test "quote" and 'quote' + 'Basic realm="{}"'.format(realm), + "Basic realm='{}'".format(realm), + + # charset is ignored + 'Basic realm="{}", charset="UTF-8"'.format(realm), + + # Multiple challenges per header + '{}, {}'.format(basic, basic2), + '{}, {}'.format(basic, other_no_realm), + '{}, {}'.format(other_no_realm, basic), + '{}, {}'.format(basic, digest), + '{}, {}'.format(digest, basic), + ): + headers = ['WWW-Authenticate: {}'.format(realm_str)] + self.check_basic_auth(headers, realm) + + # no quote: expect a warning + with support.check_warnings(("Basic Auth Realm was unquoted", + UserWarning)): + headers = ['WWW-Authenticate: Basic realm={}'.format(realm)] + self.check_basic_auth(headers, realm) + + # Multiple headers: one challenge per header. 
+ # Use the first Basic realm. + for challenges in ( + [basic, basic2], + [basic, digest], + [digest, basic], + ): + headers = ['WWW-Authenticate: {}'.format(challenge) + for challenge in challenges] + self.check_basic_auth(headers, realm) def test_proxy_basic_auth(self): opener = OpenerDirector() diff --git a/Lib/urllib/request.py b/Lib/urllib/request.py index 3184ddab24a0b..ae3fd0d10b11d 100644 --- a/Lib/urllib/request.py +++ b/Lib/urllib/request.py @@ -885,8 +885,15 @@ class AbstractBasicAuthHandler: # allow for double- and single-quoted realm values # (single quotes are a violation of the RFC, but appear in the wild) - rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+' - 'realm=(["\']?)([^"\']*)\\2', re.I) + rx = re.compile('(?:^|,)' # start of the string or ',' + '[ \t]*' # optional whitespaces + '([^ \t]+)' # scheme like "Basic" + '[ \t]+' # mandatory whitespaces + # realm=xxx + # realm='xxx' + # realm="xxx" + 'realm=(["\']?)([^"\']*)\\2', + re.I) # XXX could pre-emptively send auth info already accepted (RFC 2617, # end of section 2, and section 1.2 immediately after "credentials" @@ -898,27 +905,51 @@ def __init__(self, password_mgr=None): self.passwd = password_mgr self.add_password = self.passwd.add_password + def _parse_realm(self, header): + # parse WWW-Authenticate header: accept multiple challenges per header + found_challenge = False + for mo in AbstractBasicAuthHandler.rx.finditer(header): + scheme, quote, realm = mo.groups() + if quote not in ['"', "'"]: + warnings.warn("Basic Auth Realm was unquoted", + UserWarning, 3) + + yield (scheme, realm) + + found_challenge = True + + if not found_challenge: + if header: + scheme = header.split()[0] + else: + scheme = '' + yield (scheme, None) + def http_error_auth_reqed(self, authreq, host, req, headers): # host may be an authority (without userinfo) or a URL with an # authority - # XXX could be multiple headers - authreq = headers.get(authreq, None) + headers = headers.get_all(authreq) + if not headers: + # no header found + return - if authreq: - scheme = authreq.split()[0] - if scheme.lower() != 'basic': - raise ValueError("AbstractBasicAuthHandler does not" - " support the following scheme: '%s'" % - scheme) - else: - mo = AbstractBasicAuthHandler.rx.search(authreq) - if mo: - scheme, quote, realm = mo.groups() - if quote not in ['"',"'"]: - warnings.warn("Basic Auth Realm was unquoted", - UserWarning, 2) - if scheme.lower() == 'basic': - return self.retry_http_basic_auth(host, req, realm) + unsupported = None + for header in headers: + for scheme, realm in self._parse_realm(header): + if scheme.lower() != 'basic': + unsupported = scheme + continue + + if realm is not None: + # Use the first matching Basic challenge. + # Ignore following challenges even if they use the Basic + # scheme. 
+ return self.retry_http_basic_auth(host, req, realm) + + if unsupported is not None: + raise ValueError("AbstractBasicAuthHandler does not " + "support the following scheme: %r" + % (scheme,)) def retry_http_basic_auth(self, host, req, realm): user, pw = self.passwd.find_user_password(realm, host) diff --git a/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst b/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst new file mode 100644 index 0000000000000..be80ce79d91ed --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-03-25-16-02-16.bpo-39503.YmMbYn.rst @@ -0,0 +1,3 @@ +:class:`~urllib.request.AbstractBasicAuthHandler` of :mod:`urllib.request` +now parses all WWW-Authenticate HTTP headers and accepts multiple challenges +per header: use the realm of the first Basic challenge. diff --git a/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst b/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst new file mode 100644 index 0000000000000..9f2800581ca5e --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-01-30-16-15-29.bpo-39503.B299Yq.rst @@ -0,0 +1,5 @@ +CVE-2020-8492: The :class:`~urllib.request.AbstractBasicAuthHandler` class of the +:mod:`urllib.request` module uses an inefficient regular expression which can +be exploited by an attacker to cause a denial of service. Fix the regex to +prevent the catastrophic backtracking. Vulnerability reported by Ben Caller +and Matt Schwager. From webhook-mailer at python.org Sat Jun 20 02:44:08 2020 From: webhook-mailer at python.org (Tapas Kundu) Date: Sat, 20 Jun 2020 06:44:08 -0000 Subject: [Python-checkins] [3.5] closes bpo-38576: Disallow control characters in hostnames in http.client. (#19300) Message-ID: https://github.com/python/cpython/commit/09d8172837b6985c4ad90ee025f6b5a554a9f0ac commit: 09d8172837b6985c4ad90ee025f6b5a554a9f0ac branch: 3.5 author: Tapas Kundu <39723251+tapakund at users.noreply.github.com> committer: GitHub date: 2020-06-19T23:43:50-07:00 summary: [3.5] closes bpo-38576: Disallow control characters in hostnames in http.client. (#19300) Add host validation for control characters for more CVE-2019-18348 protection. (cherry picked from commit 83fc70159b24) files: A Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst M Lib/http/client.py M Lib/test/test_httplib.py M Lib/test/test_urllib.py diff --git a/Lib/http/client.py b/Lib/http/client.py index 85dc8028ef57f..5d5d7a7f707b4 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -771,6 +771,7 @@ def __init__(self, host, port=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT, (self.host, self.port) = self._get_hostport(host, port) + self._validate_host(self.host) # This is stored as an instance variable to allow unit # tests to replace it with a suitable mockup self._create_connection = socket.create_connection @@ -1085,6 +1086,17 @@ def _validate_path(self, url): ).format(matched=match.group(), **locals()) raise InvalidURL(msg) + def _validate_host(self, host): + """Validate a host so it doesn't contain control characters.""" + # Prevent CVE-2019-18348. + match = _contains_disallowed_url_pchar_re.search(host) + if match: + msg = ( + "URL can't contain control characters. {host!r} " + "(found at least {matched!r})" + ).format(matched=match.group(), host=host) + raise InvalidURL(msg) + def putheader(self, header, *values): """Send a request header line to the server. 
diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index c12a4298bb04e..a8e7a30af1a9f 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -986,7 +986,7 @@ def run_server(): thread.join() self.assertEqual(result, b"proxied data\n") - def test_putrequest_override_validation(self): + def test_putrequest_override_domain_validation(self): """ It should be possible to override the default validation behavior in putrequest (bpo-38216). @@ -999,6 +999,17 @@ def _validate_path(self, url): conn.sock = FakeSocket('') conn.putrequest('GET', '/\x00') + def test_putrequest_override_host_validation(self): + class UnsafeHTTPConnection(client.HTTPConnection): + def _validate_host(self, url): + pass + + conn = UnsafeHTTPConnection('example.com\r\n') + conn.sock = FakeSocket('') + # set skip_host so a ValueError is not raised upon adding the + # invalid URL as the value of the "Host:" header + conn.putrequest('GET', '/', skip_host=1) + def test_putrequest_override_encoding(self): """ It should be possible to override the default encoding diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 1e2c622e29fd8..d1074adb7c84d 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -331,7 +331,7 @@ def test_willclose(self): self.unfakehttp() @unittest.skipUnless(ssl, "ssl module required") - def test_url_with_control_char_rejected(self): + def test_url_path_with_control_char_rejected(self): for char_no in list(range(0, 0x21)) + [0x7f]: char = chr(char_no) schemeless_url = "//localhost:7777/test{}/".format(char) @@ -360,7 +360,7 @@ def test_url_with_control_char_rejected(self): self.unfakehttp() @unittest.skipUnless(ssl, "ssl module required") - def test_url_with_newline_header_injection_rejected(self): + def test_url_path_with_newline_header_injection_rejected(self): self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") host = "localhost:7777?a=1 HTTP/1.1\r\nX-injected: header\r\nTEST: 123" schemeless_url = "//" + host + ":8080/test/?test=a" @@ -385,6 +385,38 @@ def test_url_with_newline_header_injection_rejected(self): finally: self.unfakehttp() + @unittest.skipUnless(ssl, "ssl module required") + def test_url_host_with_control_char_rejected(self): + for char_no in list(range(0, 0x21)) + [0x7f]: + char = chr(char_no) + schemeless_url = "//localhost{}/test/".format(char) + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") + try: + escaped_char_repr = repr(char).replace('\\', r'\\') + InvalidURL = http.client.InvalidURL + with self.assertRaisesRegex( + InvalidURL, r"contain control.*{}".format(escaped_char_repr)): + urlopen("http:{}".format(schemeless_url)) + with self.assertRaisesRegex(InvalidURL, r"contain control.*{}".format(escaped_char_repr)): + urlopen("http:{}".format(schemeless_url)) + finally: + self.unfakehttp() + + @unittest.skipUnless(ssl, "ssl module required") + def test_url_host_with_newline_header_injection_rejected(self): + self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello.") + host = "localhost\r\nX-injected: header\r\n" + schemeless_url = "//" + host + ":8080/test/?test=a" + try: + InvalidURL = http.client.InvalidURL + with self.assertRaisesRegex( + InvalidURL, r"contain control.*\\r"): + urlopen("http:{}".format(schemeless_url)) + with self.assertRaisesRegex(InvalidURL, r"contain control.*\\n"): + urlopen("http:{}".format(schemeless_url)) + finally: + self.unfakehttp() + def test_read_0_9(self): # "0.9" response accepted (but not "simple responses" without # a status line) diff --git 
a/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst b/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst new file mode 100644 index 0000000000000..1d03574651725 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-03-14-14-57-44.bpo-38576.OowwQn.rst @@ -0,0 +1 @@ +Disallow control characters in hostnames in http.client, addressing CVE-2019-18348. Such potentially malicious header injection URLs now cause a InvalidURL to be raised. From webhook-mailer at python.org Sat Jun 20 04:10:52 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sat, 20 Jun 2020 08:10:52 -0000 Subject: [Python-checkins] bpo-41043: Escape literal part of the path for glob(). (GH-20994) Message-ID: https://github.com/python/cpython/commit/935586845815f5b4c7814794413f6a812d4bd45f commit: 935586845815f5b4c7814794413f6a812d4bd45f branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-20T11:10:31+03:00 summary: bpo-41043: Escape literal part of the path for glob(). (GH-20994) files: A Misc/NEWS.d/next/Library/2020-06-20-00-19-30.bpo-41043.p-Pk-H.rst M Lib/distutils/command/build_py.py M Lib/idlelib/tree.py M Lib/imghdr.py M Lib/pdb.py M Lib/sndhdr.py M Lib/test/_test_multiprocessing.py M Lib/test/libregrtest/main.py M Lib/test/support/__init__.py M Lib/test/test_bz2.py M Lib/test/test_crashers.py M Lib/test/test_dbm.py M Lib/test/test_import/__init__.py M Lib/test/test_mailbox.py M Lib/test/test_regrtest.py M Lib/test/test_site.py M Lib/test/test_tokenize.py M Lib/test/test_unicode_file.py M Lib/webbrowser.py M Tools/c-analyzer/c_analyzer/common/files.py M Tools/c-analyzer/check-c-globals.py M Tools/peg_generator/scripts/test_parse_directory.py M Tools/ssl/make_ssl_data.py M setup.py diff --git a/Lib/distutils/command/build_py.py b/Lib/distutils/command/build_py.py index cf0ca57c32047..edc2171cd122d 100644 --- a/Lib/distutils/command/build_py.py +++ b/Lib/distutils/command/build_py.py @@ -5,7 +5,7 @@ import os import importlib.util import sys -from glob import glob +import glob from distutils.core import Command from distutils.errors import * @@ -125,7 +125,7 @@ def find_data_files(self, package, src_dir): files = [] for pattern in globs: # Each pattern has to be converted to a platform-specific path - filelist = glob(os.path.join(src_dir, convert_path(pattern))) + filelist = glob.glob(os.path.join(glob.escape(src_dir), convert_path(pattern))) # Files that match more than one pattern are only added once files.extend([fn for fn in filelist if fn not in files and os.path.isfile(fn)]) @@ -216,7 +216,7 @@ def check_module(self, module, module_file): def find_package_modules(self, package, package_dir): self.check_package(package, package_dir) - module_files = glob(os.path.join(package_dir, "*.py")) + module_files = glob.glob(os.path.join(glob.escape(package_dir), "*.py")) modules = [] setup_script = os.path.abspath(self.distribution.script_name) diff --git a/Lib/idlelib/tree.py b/Lib/idlelib/tree.py index 6229be4e5a8ad..5947268f5c35a 100644 --- a/Lib/idlelib/tree.py +++ b/Lib/idlelib/tree.py @@ -38,7 +38,7 @@ def listicons(icondir=ICONDIR): """Utility to display the available icons.""" root = Tk() import glob - list = glob.glob(os.path.join(icondir, "*.gif")) + list = glob.glob(os.path.join(glob.escape(icondir), "*.gif")) list.sort() images = [] row = column = 0 diff --git a/Lib/imghdr.py b/Lib/imghdr.py index 76e8abb2d5833..6e01fd857469a 100644 --- a/Lib/imghdr.py +++ b/Lib/imghdr.py @@ -152,7 +152,7 @@ def testall(list, recursive, toplevel): if recursive or 
toplevel: print('recursing down:') import glob - names = glob.glob(os.path.join(filename, '*')) + names = glob.glob(os.path.join(glob.escape(filename), '*')) testall(names, recursive, 0) else: print('*** directory (use -r) ***') diff --git a/Lib/pdb.py b/Lib/pdb.py index bf503f1e73ee1..701386e8b96c2 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -473,7 +473,7 @@ def _complete_location(self, text, line, begidx, endidx): except Exception: ret = [] # Then, try to complete file names as well. - globs = glob.glob(text + '*') + globs = glob.glob(glob.escape(text) + '*') for fn in globs: if os.path.isdir(fn): ret.append(fn + '/') diff --git a/Lib/sndhdr.py b/Lib/sndhdr.py index 594353136f5c3..96595c6974468 100644 --- a/Lib/sndhdr.py +++ b/Lib/sndhdr.py @@ -241,7 +241,7 @@ def testall(list, recursive, toplevel): if recursive or toplevel: print('recursing down:') import glob - names = glob.glob(os.path.join(filename, '*')) + names = glob.glob(os.path.join(glob.escape(filename), '*')) testall(names, recursive, 0) else: print('*** directory (use -r) ***') diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 444e234509c27..5f65d966d62ee 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -4260,7 +4260,7 @@ class _TestImportStar(unittest.TestCase): def get_module_names(self): import glob folder = os.path.dirname(multiprocessing.__file__) - pattern = os.path.join(folder, '*.py') + pattern = os.path.join(glob.escape(folder), '*.py') files = glob.glob(pattern) modules = [os.path.splitext(os.path.split(f)[1])[0] for f in files] modules = ['multiprocessing.' + m for m in modules] diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 95b4856c8bed7..adf31cc94940d 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -602,7 +602,7 @@ def create_temp_dir(self): def cleanup(self): import glob - path = os.path.join(self.tmp_dir, 'test_python_*') + path = os.path.join(glob.escape(self.tmp_dir), 'test_python_*') print("Cleanup %s directory" % self.tmp_dir) for name in glob.glob(path): if os.path.isdir(name): diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index bceb8cda20c35..5707d8eeaa28b 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -1345,7 +1345,7 @@ def _platform_specific(self): dll, os.path.join(dest_dir, os.path.basename(dll)) )) - for runtime in glob.glob(os.path.join(src_dir, "vcruntime*.dll")): + for runtime in glob.glob(os.path.join(glob.escape(src_dir), "vcruntime*.dll")): self._also_link.append(( runtime, os.path.join(dest_dir, os.path.basename(runtime)) diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index 91ccff2d0c07f..8f0773d55faef 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -70,7 +70,7 @@ class BaseTest(unittest.TestCase): # simply use the bigger test data for all tests. 
test_size = 0 BIG_TEXT = bytearray(128*1024) - for fname in glob.glob(os.path.join(os.path.dirname(__file__), '*.py')): + for fname in glob.glob(os.path.join(glob.escape(os.path.dirname(__file__)), '*.py')): with open(fname, 'rb') as fh: test_size += fh.readinto(memoryview(BIG_TEXT)[test_size:]) if test_size > 128*1024: diff --git a/Lib/test/test_crashers.py b/Lib/test/test_crashers.py index 58dfd001da362..31b712028f8a1 100644 --- a/Lib/test/test_crashers.py +++ b/Lib/test/test_crashers.py @@ -11,7 +11,7 @@ from test.support.script_helper import assert_python_failure CRASHER_DIR = os.path.join(os.path.dirname(__file__), "crashers") -CRASHER_FILES = os.path.join(CRASHER_DIR, "*.py") +CRASHER_FILES = os.path.join(glob.escape(CRASHER_DIR), "*.py") infinite_loops = ["infinite_loop_re.py", "nasty_eq_vs_dict.py"] diff --git a/Lib/test/test_dbm.py b/Lib/test/test_dbm.py index 1db3bef6f4136..571da973aab0e 100644 --- a/Lib/test/test_dbm.py +++ b/Lib/test/test_dbm.py @@ -33,7 +33,7 @@ def dbm_iterator(): def delete_files(): # we don't know the precise name the underlying database uses # so we use glob to locate all names - for f in glob.glob(_fname + "*"): + for f in glob.glob(glob.escape(_fname) + "*"): test.support.unlink(f) diff --git a/Lib/test/test_import/__init__.py b/Lib/test/test_import/__init__.py index 060d145970ee9..a04cf65945e93 100644 --- a/Lib/test/test_import/__init__.py +++ b/Lib/test/test_import/__init__.py @@ -486,7 +486,7 @@ def test_dll_dependency_import(self): pyexe = os.path.join(tmp, os.path.basename(sys.executable)) shutil.copy(sys.executable, pyexe) shutil.copy(dllname, tmp) - for f in glob.glob(os.path.join(sys.prefix, "vcruntime*.dll")): + for f in glob.glob(os.path.join(glob.escape(sys.prefix), "vcruntime*.dll")): shutil.copy(f, tmp) shutil.copy(pydname, tmp2) diff --git a/Lib/test/test_mailbox.py b/Lib/test/test_mailbox.py index fdda1d11d3307..6f891d413cd8f 100644 --- a/Lib/test/test_mailbox.py +++ b/Lib/test/test_mailbox.py @@ -979,7 +979,7 @@ def tearDown(self): super().tearDown() self._box.close() self._delete_recursively(self._path) - for lock_remnant in glob.glob(self._path + '.*'): + for lock_remnant in glob.glob(glob.escape(self._path) + '.*'): support.unlink(lock_remnant) def assertMailboxEmpty(self): @@ -1311,7 +1311,7 @@ def tearDown(self): super().tearDown() self._box.close() self._delete_recursively(self._path) - for lock_remnant in glob.glob(self._path + '.*'): + for lock_remnant in glob.glob(glob.escape(self._path) + '.*'): support.unlink(lock_remnant) def test_labels(self): diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index de209da41a34d..6745be6fea1ac 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -556,7 +556,7 @@ def test_finds_expected_number_of_tests(self): args = ['-Wd', '-E', '-bb', '-m', 'test.regrtest', '--list-tests'] output = self.run_python(args) rough_number_of_tests_found = len(output.splitlines()) - actual_testsuite_glob = os.path.join(os.path.dirname(__file__), + actual_testsuite_glob = os.path.join(glob.escape(os.path.dirname(__file__)), 'test*.py') rough_counted_test_py_files = len(glob.glob(actual_testsuite_glob)) # We're not trying to duplicate test finding logic in here, diff --git a/Lib/test/test_site.py b/Lib/test/test_site.py index 9f4a8bc64f7ee..9751c64c99e74 100644 --- a/Lib/test/test_site.py +++ b/Lib/test/test_site.py @@ -543,7 +543,7 @@ def test_startup_imports(self): # found in sys.path (see site.addpackage()). Skip the test if at least # one .pth file is found. 
for path in isolated_paths: - pth_files = glob.glob(os.path.join(path, "*.pth")) + pth_files = glob.glob(os.path.join(glob.escape(path), "*.pth")) if pth_files: self.skipTest(f"found {len(pth_files)} .pth files in: {path}") diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py index 4c90092893a22..6de7aa87bb2f9 100644 --- a/Lib/test/test_tokenize.py +++ b/Lib/test/test_tokenize.py @@ -1605,7 +1605,7 @@ def test_random_files(self): import glob, random fn = support.findfile("tokenize_tests.txt") tempdir = os.path.dirname(fn) or os.curdir - testfiles = glob.glob(os.path.join(tempdir, "test*.py")) + testfiles = glob.glob(os.path.join(glob.escape(tempdir), "test*.py")) # Tokenize is broken on test_pep3131.py because regular expressions are # broken on the obscure unicode identifiers in it. *sigh* diff --git a/Lib/test/test_unicode_file.py b/Lib/test/test_unicode_file.py index ed1f6cecc7856..46a0d062540b7 100644 --- a/Lib/test/test_unicode_file.py +++ b/Lib/test/test_unicode_file.py @@ -41,7 +41,7 @@ def _do_single(self, filename): self._do_copyish(filename, filename) # Filename should appear in glob output self.assertTrue( - os.path.abspath(filename)==os.path.abspath(glob.glob(filename)[0])) + os.path.abspath(filename)==os.path.abspath(glob.glob(glob.escape(filename))[0])) # basename should appear in listdir. path, base = os.path.split(os.path.abspath(filename)) file_list = os.listdir(path) diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 3dcf66b659825..31e1df4247946 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -413,7 +413,7 @@ def _find_grail_rc(self): tempdir = os.path.join(tempfile.gettempdir(), ".grail-unix") user = pwd.getpwuid(os.getuid())[0] - filename = os.path.join(tempdir, user + "-*") + filename = os.path.join(glob.escape(tempdir), glob.escape(user) + "-*") maybes = glob.glob(filename) if not maybes: return None diff --git a/Misc/NEWS.d/next/Library/2020-06-20-00-19-30.bpo-41043.p-Pk-H.rst b/Misc/NEWS.d/next/Library/2020-06-20-00-19-30.bpo-41043.p-Pk-H.rst new file mode 100644 index 0000000000000..9c6020eb8d738 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-00-19-30.bpo-41043.p-Pk-H.rst @@ -0,0 +1,2 @@ +Fixed the use of :func:`~glob.glob` in the stdlib: literal part of the path +is now always correctly escaped. diff --git a/Tools/c-analyzer/c_analyzer/common/files.py b/Tools/c-analyzer/c_analyzer/common/files.py index ab551a84bad15..f630afe625924 100644 --- a/Tools/c-analyzer/c_analyzer/common/files.py +++ b/Tools/c-analyzer/c_analyzer/common/files.py @@ -41,6 +41,8 @@ def walk_tree(root, *, def glob_tree(root, *, suffix=None, _glob=glob.iglob, + _escape=glob.escape, + _join=os.path.join, ): """Yield each file in the tree under the given directory name. 
@@ -51,9 +53,9 @@ def glob_tree(root, *, if not isinstance(suffix, str): raise ValueError('suffix must be a string') - for filename in _glob(f'{root}/*{suffix}'): + for filename in _glob(_join(_escape(root), f'*{suffix}')): yield filename - for filename in _glob(f'{root}/**/*{suffix}'): + for filename in _glob(_join(_escape(root), f'**/*{suffix}')): yield filename diff --git a/Tools/c-analyzer/check-c-globals.py b/Tools/c-analyzer/check-c-globals.py index e68ed9271fe48..1371f92742327 100644 --- a/Tools/c-analyzer/check-c-globals.py +++ b/Tools/c-analyzer/check-c-globals.py @@ -37,7 +37,9 @@ def find_capi_vars(root): capi_vars = {} for dirname in SOURCE_DIRS: - for filename in glob.glob(os.path.join(ROOT_DIR, dirname, '**/*.[hc]'), + for filename in glob.glob(os.path.join( + glob.escape(os.path.join(ROOT_DIR, dirname)), + '**/*.[hc]'), recursive=True): with open(filename) as file: for name in _find_capi_vars(file): diff --git a/Tools/peg_generator/scripts/test_parse_directory.py b/Tools/peg_generator/scripts/test_parse_directory.py index d8f4f0ecd3e05..a5e26f0a0feda 100755 --- a/Tools/peg_generator/scripts/test_parse_directory.py +++ b/Tools/peg_generator/scripts/test_parse_directory.py @@ -7,7 +7,7 @@ import time import traceback import tokenize -from glob import glob +from glob import glob, escape from pathlib import PurePath from typing import List, Optional, Any, Tuple @@ -109,7 +109,7 @@ def parse_directory(directory: str, verbose: bool, excluded_files: List[str], sh files = [] total_seconds = 0 - for file in sorted(glob(f"{directory}/**/*.py", recursive=True)): + for file in sorted(glob(os.path.join(escape(directory), f"**/*.py"), recursive=True)): # Only attempt to parse Python files and files that are not excluded if any(PurePath(file).match(pattern) for pattern in excluded_files): continue diff --git a/Tools/ssl/make_ssl_data.py b/Tools/ssl/make_ssl_data.py index a29c04ab57183..1dc234f5232b1 100755 --- a/Tools/ssl/make_ssl_data.py +++ b/Tools/ssl/make_ssl_data.py @@ -39,7 +39,7 @@ def parse_error_codes(h_file, prefix, libcode): f = sys.stdout if use_stdout else open(outfile, "w") # mnemonic -> (library code, error prefix, header file) error_libraries = {} - for error_header in glob.glob(os.path.join(openssl_inc, 'include/openssl/*err.h')): + for error_header in glob.glob(os.path.join(glob.escape(openssl_inc), 'include/openssl/*err.h')): base = os.path.basename(error_header) if base in ('buffererr.h', 'objectserr.h', 'storeerr.h'): # Deprecated in 3.0. 
diff --git a/setup.py b/setup.py index b220f5279ca63..648e4e6a8932e 100644 --- a/setup.py +++ b/setup.py @@ -8,7 +8,7 @@ import re import sys import sysconfig -from glob import glob +from glob import glob, escape try: @@ -401,7 +401,7 @@ def update_sources_depends(self): # Python header files headers = [sysconfig.get_config_h_filename()] - headers += glob(os.path.join(sysconfig.get_path('include'), "*.h")) + headers += glob(os.path.join(escape(sysconfig.get_path('include')), "*.h")) for ext in self.extensions: ext.sources = [ find_module_file(filename, moddirlist) @@ -2431,7 +2431,7 @@ def detect_hash_builtins(self): if "blake2" in configured: blake2_deps = glob( - os.path.join(self.srcdir, 'Modules/_blake2/impl/*') + os.path.join(escape(self.srcdir), 'Modules/_blake2/impl/*') ) blake2_deps.append('hashlib.h') self.add(Extension( @@ -2446,7 +2446,7 @@ def detect_hash_builtins(self): if "sha3" in configured: sha3_deps = glob( - os.path.join(self.srcdir, 'Modules/_sha3/kcp/*') + os.path.join(escape(self.srcdir), 'Modules/_sha3/kcp/*') ) sha3_deps.append('hashlib.h') self.add(Extension( From webhook-mailer at python.org Sat Jun 20 08:57:31 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 20 Jun 2020 12:57:31 -0000 Subject: [Python-checkins] bpo-40958: Avoid 'possible loss of data' warning on Windows (GH-20970) Message-ID: https://github.com/python/cpython/commit/861efc6e8fe7f030b1e193989b13287b31385939 commit: 861efc6e8fe7f030b1e193989b13287b31385939 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-20T05:57:27-07:00 summary: bpo-40958: Avoid 'possible loss of data' warning on Windows (GH-20970) files: M Parser/pegen.c M Parser/pegen.h diff --git a/Parser/pegen.c b/Parser/pegen.c index b374740308a36..594754cee5d53 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -397,7 +397,7 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, } if (p->start_rule == Py_file_input) { - error_line = PyErr_ProgramTextObject(p->tok->filename, lineno); + error_line = PyErr_ProgramTextObject(p->tok->filename, (int) lineno); } if (!error_line) { diff --git a/Parser/pegen.h b/Parser/pegen.h index 43168074c3500..ef095dda49fd7 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -34,7 +34,7 @@ typedef struct _memo { typedef struct { int type; PyObject *bytes; - Py_ssize_t lineno, col_offset, end_lineno, end_col_offset; + int lineno, col_offset, end_lineno, end_col_offset; Memo *memo; } Token; From webhook-mailer at python.org Sat Jun 20 13:10:34 2020 From: webhook-mailer at python.org (Brian Rutledge) Date: Sat, 20 Jun 2020 17:10:34 -0000 Subject: [Python-checkins] Add link to .pypirc specification (GH-20680) Message-ID: https://github.com/python/cpython/commit/af157fad286c00ff204e86d8556648cbb53ba99e commit: af157fad286c00ff204e86d8556648cbb53ba99e branch: master author: Brian Rutledge committer: GitHub date: 2020-06-20T10:10:25-07:00 summary: Add link to .pypirc specification (GH-20680) Related to https://github.com/pypa/twine/issues/638 and https://github.com/pypa/packaging.python.org/issues/730, I wrote a spec based on the one that was removed in https://github.com/python/cpython/pull/13087. However, a Google search for "pypirc" turned up at least one [blog post](https://truveris.github.io/articles/configuring-pypirc/) that links to https://docs.python.org/3/distutils/packageindex.html#the-pypirc-file, which now just links to this document. So, I thought a link to the spec would be handy. 
Automerge-Triggered-By: @jaraco files: M Doc/distributing/index.rst diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst index 5f7b3bbc4f917..02379946244d8 100644 --- a/Doc/distributing/index.rst +++ b/Doc/distributing/index.rst @@ -128,6 +128,7 @@ involved in creating and publishing a project: * `Project structure`_ * `Building and packaging the project`_ * `Uploading the project to the Python Packaging Index`_ +* `The .pypirc file`_ .. _Project structure: \ https://packaging.python.org/tutorials/distributing-packages/ @@ -135,6 +136,8 @@ involved in creating and publishing a project: https://packaging.python.org/tutorials/distributing-packages/#packaging-your-project .. _Uploading the project to the Python Packaging Index: \ https://packaging.python.org/tutorials/distributing-packages/#uploading-your-project-to-pypi +.. _The .pypirc file: \ + https://packaging.python.org/specifications/pypirc/ How do I...? From webhook-mailer at python.org Sat Jun 20 13:40:11 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 20 Jun 2020 17:40:11 -0000 Subject: [Python-checkins] bpo-41044: Generate valid PEG python parsers for opt+seq rules (GH-20995) Message-ID: https://github.com/python/cpython/commit/55460ee6dc9a4f16bd68d6b6be3a8398c7d4a596 commit: 55460ee6dc9a4f16bd68d6b6be3a8398c7d4a596 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-06-20T18:40:06+01:00 summary: bpo-41044: Generate valid PEG python parsers for opt+seq rules (GH-20995) Co-authored-by: Pablo Galindo files: M Lib/test/test_peg_generator/test_pegen.py M Tools/peg_generator/pegen/python_generator.py diff --git a/Lib/test/test_peg_generator/test_pegen.py b/Lib/test/test_peg_generator/test_pegen.py index 30e1b675643b2..5b4e964d698ad 100644 --- a/Lib/test/test_peg_generator/test_pegen.py +++ b/Lib/test/test_peg_generator/test_pegen.py @@ -493,6 +493,14 @@ def test_start_leader(self) -> None: # Would assert False without a special case in compute_left_recursives(). make_parser(grammar) + def test_opt_sequence(self) -> None: + grammar = """ + start: [NAME*] + """ + # This case was failing because of a double trailing comma at the end + # of a line in the generated source. See bpo-41044 + make_parser(grammar) + def test_left_recursion_too_complex(self) -> None: grammar = """ start: foo diff --git a/Tools/peg_generator/pegen/python_generator.py b/Tools/peg_generator/pegen/python_generator.py index 64336552f24f6..45a75975dbf5e 100644 --- a/Tools/peg_generator/pegen/python_generator.py +++ b/Tools/peg_generator/pegen/python_generator.py @@ -93,7 +93,13 @@ def visit_NegativeLookahead(self, node: NegativeLookahead) -> Tuple[None, str]: def visit_Opt(self, node: Opt) -> Tuple[str, str]: name, call = self.visit(node.node) - return "opt", f"{call}," # Note trailing comma! 
+ # Note trailing comma (the call may already have one comma + # at the end, for example when rules have both repeat0 and optional + # markers, e.g: [rule*]) + if call.endswith(","): + return "opt", call + else: + return "opt", f"{call}," def visit_Repeat0(self, node: Repeat0) -> Tuple[str, str]: if node in self.cache: From webhook-mailer at python.org Sat Jun 20 14:07:30 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 20 Jun 2020 18:07:30 -0000 Subject: [Python-checkins] bpo-40939: Remove the old parser (Part 2) (GH-21005) Message-ID: https://github.com/python/cpython/commit/314858e2763e76e77029ea0b691d749c32939087 commit: 314858e2763e76e77029ea0b691d749c32939087 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-20T19:07:25+01:00 summary: bpo-40939: Remove the old parser (Part 2) (GH-21005) Remove some remaining files and Makefile targets for the old parser files: A Misc/NEWS.d/next/Core and Builtins/2020-06-20-16-59-02.bpo-40939.6810Ak.rst D Include/bitset.h D Include/graminit.h D Include/grammar.h D Include/node.h D Include/parsetok.h D Lib/symbol.py D Lib/test/test_symbol.py D Parser/node.c D Python/graminit.c D Tools/scripts/generate_symbol_py.py M .gitattributes M Include/ast.h M Include/compile.h M Makefile.pre.in M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M PCbuild/regen.vcxproj M Python/ast.c M Python/future.c M Python/peephole.c M Python/pythonrun.c M configure M configure.ac diff --git a/.gitattributes b/.gitattributes index bec16a08152eb..598d8f0f159ae 100644 --- a/.gitattributes +++ b/.gitattributes @@ -41,8 +41,6 @@ PC/readme.txt text eol=crlf # Generated files # https://github.com/github/linguist#generated-code -Include/graminit.h linguist-generated=true -Python/graminit.h linguist-generated=true Modules/clinic/*.h linguist-generated=true Objects/clinic/*.h linguist-generated=true PC/clinic/*.h linguist-generated=true diff --git a/Include/ast.h b/Include/ast.h index a8c52af786b14..de42a3b5e6f91 100644 --- a/Include/ast.h +++ b/Include/ast.h @@ -6,19 +6,8 @@ extern "C" { #endif #include "Python-ast.h" /* mod_ty */ -#include "node.h" /* node */ PyAPI_FUNC(int) PyAST_Validate(mod_ty); -PyAPI_FUNC(mod_ty) PyAST_FromNode( - const node *n, - PyCompilerFlags *flags, - const char *filename, /* decoded from the filesystem encoding */ - PyArena *arena); -PyAPI_FUNC(mod_ty) PyAST_FromNodeObject( - const node *n, - PyCompilerFlags *flags, - PyObject *filename, - PyArena *arena); /* _PyAST_ExprAsUnicode is defined in ast_unparse.c */ PyAPI_FUNC(PyObject *) _PyAST_ExprAsUnicode(expr_ty); diff --git a/Include/bitset.h b/Include/bitset.h deleted file mode 100644 index 6a2ac9787eaba..0000000000000 --- a/Include/bitset.h +++ /dev/null @@ -1,23 +0,0 @@ - -#ifndef Py_BITSET_H -#define Py_BITSET_H -#ifdef __cplusplus -extern "C" { -#endif - -/* Bitset interface */ - -#define BYTE char -typedef BYTE *bitset; - -#define testbit(ss, ibit) (((ss)[BIT2BYTE(ibit)] & BIT2MASK(ibit)) != 0) - -#define BITSPERBYTE (8*sizeof(BYTE)) -#define BIT2BYTE(ibit) ((ibit) / BITSPERBYTE) -#define BIT2SHIFT(ibit) ((ibit) % BITSPERBYTE) -#define BIT2MASK(ibit) (1 << BIT2SHIFT(ibit)) - -#ifdef __cplusplus -} -#endif -#endif /* !Py_BITSET_H */ diff --git a/Include/compile.h b/Include/compile.h index 12417ce805464..4dd5435ce71a9 100644 --- a/Include/compile.h +++ b/Include/compile.h @@ -8,10 +8,6 @@ extern "C" { #endif /* Public interface */ -struct _node; /* Declare the existence of this type */ -PyAPI_FUNC(PyCodeObject *) PyNode_Compile(struct _node 
*, const char *); -/* XXX (ncoghlan): Unprefixed type name in a public API! */ - #define PyCF_MASK (CO_FUTURE_DIVISION | CO_FUTURE_ABSOLUTE_IMPORT | \ CO_FUTURE_WITH_STATEMENT | CO_FUTURE_PRINT_FUNCTION | \ CO_FUTURE_UNICODE_LITERALS | CO_FUTURE_BARRY_AS_BDFL | \ diff --git a/Include/graminit.h b/Include/graminit.h deleted file mode 100644 index d1027b7a743f2..0000000000000 --- a/Include/graminit.h +++ /dev/null @@ -1,94 +0,0 @@ -/* Generated by Parser/pgen */ - -#define single_input 256 -#define file_input 257 -#define eval_input 258 -#define decorator 259 -#define decorators 260 -#define decorated 261 -#define async_funcdef 262 -#define funcdef 263 -#define parameters 264 -#define typedargslist 265 -#define tfpdef 266 -#define varargslist 267 -#define vfpdef 268 -#define stmt 269 -#define simple_stmt 270 -#define small_stmt 271 -#define expr_stmt 272 -#define annassign 273 -#define testlist_star_expr 274 -#define augassign 275 -#define del_stmt 276 -#define pass_stmt 277 -#define flow_stmt 278 -#define break_stmt 279 -#define continue_stmt 280 -#define return_stmt 281 -#define yield_stmt 282 -#define raise_stmt 283 -#define import_stmt 284 -#define import_name 285 -#define import_from 286 -#define import_as_name 287 -#define dotted_as_name 288 -#define import_as_names 289 -#define dotted_as_names 290 -#define dotted_name 291 -#define global_stmt 292 -#define nonlocal_stmt 293 -#define assert_stmt 294 -#define compound_stmt 295 -#define async_stmt 296 -#define if_stmt 297 -#define while_stmt 298 -#define for_stmt 299 -#define try_stmt 300 -#define with_stmt 301 -#define with_item 302 -#define except_clause 303 -#define suite 304 -#define namedexpr_test 305 -#define test 306 -#define test_nocond 307 -#define lambdef 308 -#define lambdef_nocond 309 -#define or_test 310 -#define and_test 311 -#define not_test 312 -#define comparison 313 -#define comp_op 314 -#define star_expr 315 -#define expr 316 -#define xor_expr 317 -#define and_expr 318 -#define shift_expr 319 -#define arith_expr 320 -#define term 321 -#define factor 322 -#define power 323 -#define atom_expr 324 -#define atom 325 -#define testlist_comp 326 -#define trailer 327 -#define subscriptlist 328 -#define subscript 329 -#define sliceop 330 -#define exprlist 331 -#define testlist 332 -#define dictorsetmaker 333 -#define classdef 334 -#define arglist 335 -#define argument 336 -#define comp_iter 337 -#define sync_comp_for 338 -#define comp_for 339 -#define comp_if 340 -#define encoding_decl 341 -#define yield_expr 342 -#define yield_arg 343 -#define func_body_suite 344 -#define func_type_input 345 -#define func_type 346 -#define typelist 347 diff --git a/Include/grammar.h b/Include/grammar.h deleted file mode 100644 index 4b66b1e9b9745..0000000000000 --- a/Include/grammar.h +++ /dev/null @@ -1,77 +0,0 @@ - -/* Grammar interface */ - -#ifndef Py_GRAMMAR_H -#define Py_GRAMMAR_H -#ifdef __cplusplus -extern "C" { -#endif - -#include "bitset.h" /* Sigh... 
*/ - -/* A label of an arc */ - -typedef struct { - int lb_type; - const char *lb_str; -} label; - -#define EMPTY 0 /* Label number 0 is by definition the empty label */ - -/* A list of labels */ - -typedef struct { - int ll_nlabels; - const label *ll_label; -} labellist; - -/* An arc from one state to another */ - -typedef struct { - short a_lbl; /* Label of this arc */ - short a_arrow; /* State where this arc goes to */ -} arc; - -/* A state in a DFA */ - -typedef struct { - int s_narcs; - const arc *s_arc; /* Array of arcs */ - - /* Optional accelerators */ - int s_lower; /* Lowest label index */ - int s_upper; /* Highest label index */ - int *s_accel; /* Accelerator */ - int s_accept; /* Nonzero for accepting state */ -} state; - -/* A DFA */ - -typedef struct { - int d_type; /* Non-terminal this represents */ - char *d_name; /* For printing */ - int d_nstates; - state *d_state; /* Array of states */ - bitset d_first; -} dfa; - -/* A grammar */ - -typedef struct { - int g_ndfas; - const dfa *g_dfa; /* Array of DFAs */ - const labellist g_ll; - int g_start; /* Start symbol of the grammar */ - int g_accel; /* Set if accelerators present */ -} grammar; - -/* FUNCTIONS */ -const dfa *PyGrammar_FindDFA(grammar *g, int type); -const char *PyGrammar_LabelRepr(label *lb); -void PyGrammar_AddAccelerators(grammar *g); -void PyGrammar_RemoveAccelerators(grammar *); - -#ifdef __cplusplus -} -#endif -#endif /* !Py_GRAMMAR_H */ diff --git a/Include/node.h b/Include/node.h deleted file mode 100644 index ca24f28908592..0000000000000 --- a/Include/node.h +++ /dev/null @@ -1,47 +0,0 @@ - -/* Parse tree node interface */ - -#ifndef Py_NODE_H -#define Py_NODE_H -#ifdef __cplusplus -extern "C" { -#endif - -typedef struct _node { - short n_type; - char *n_str; - int n_lineno; - int n_col_offset; - int n_nchildren; - struct _node *n_child; - int n_end_lineno; - int n_end_col_offset; -} node; - -PyAPI_FUNC(node *) PyNode_New(int type); -PyAPI_FUNC(int) PyNode_AddChild(node *n, int type, - char *str, int lineno, int col_offset, - int end_lineno, int end_col_offset); -PyAPI_FUNC(void) PyNode_Free(node *n); -#ifndef Py_LIMITED_API -PyAPI_FUNC(Py_ssize_t) _PyNode_SizeOf(node *n); -#endif - -/* Node access functions */ -#define NCH(n) ((n)->n_nchildren) - -#define CHILD(n, i) (&(n)->n_child[i]) -#define TYPE(n) ((n)->n_type) -#define STR(n) ((n)->n_str) -#define LINENO(n) ((n)->n_lineno) - -/* Assert that the type of a node is what we expect */ -#define REQ(n, type) assert(TYPE(n) == (type)) - -PyAPI_FUNC(void) PyNode_ListTree(node *); -void _PyNode_FinalizeEndPos(node *n); // helper also used in parsetok.c - -#ifdef __cplusplus -} -#endif -#endif /* !Py_NODE_H */ diff --git a/Include/parsetok.h b/Include/parsetok.h deleted file mode 100644 index 935d733e90a5a..0000000000000 --- a/Include/parsetok.h +++ /dev/null @@ -1,110 +0,0 @@ -/* Parser-tokenizer link interface */ - -#ifndef Py_LIMITED_API -#ifndef Py_PARSETOK_H -#define Py_PARSETOK_H -#ifdef __cplusplus -extern "C" { -#endif - -#include "grammar.h" /* grammar */ -#include "node.h" /* node */ - -typedef struct { - int error; - PyObject *filename; - int lineno; - int offset; - char *text; /* UTF-8-encoded string */ - int token; - int expected; -} perrdetail; - -#if 0 -#define PyPARSE_YIELD_IS_KEYWORD 0x0001 -#endif - -#define PyPARSE_DONT_IMPLY_DEDENT 0x0002 - -#if 0 -#define PyPARSE_WITH_IS_KEYWORD 0x0003 -#define PyPARSE_PRINT_IS_FUNCTION 0x0004 -#define PyPARSE_UNICODE_LITERALS 0x0008 -#endif - -#define PyPARSE_IGNORE_COOKIE 0x0010 -#define 
PyPARSE_BARRY_AS_BDFL 0x0020 -#define PyPARSE_TYPE_COMMENTS 0x0040 -#define PyPARSE_ASYNC_HACKS 0x0080 - -PyAPI_FUNC(node *) PyParser_ParseString(const char *, grammar *, int, - perrdetail *); -PyAPI_FUNC(node *) PyParser_ParseFile (FILE *, const char *, grammar *, int, - const char *, const char *, - perrdetail *); - -PyAPI_FUNC(node *) PyParser_ParseStringFlags(const char *, grammar *, int, - perrdetail *, int); -PyAPI_FUNC(node *) PyParser_ParseFileFlags( - FILE *fp, - const char *filename, /* decoded from the filesystem encoding */ - const char *enc, - grammar *g, - int start, - const char *ps1, - const char *ps2, - perrdetail *err_ret, - int flags); -PyAPI_FUNC(node *) PyParser_ParseFileFlagsEx( - FILE *fp, - const char *filename, /* decoded from the filesystem encoding */ - const char *enc, - grammar *g, - int start, - const char *ps1, - const char *ps2, - perrdetail *err_ret, - int *flags); -PyAPI_FUNC(node *) PyParser_ParseFileObject( - FILE *fp, - PyObject *filename, - const char *enc, - grammar *g, - int start, - const char *ps1, - const char *ps2, - perrdetail *err_ret, - int *flags); - -PyAPI_FUNC(node *) PyParser_ParseStringFlagsFilename( - const char *s, - const char *filename, /* decoded from the filesystem encoding */ - grammar *g, - int start, - perrdetail *err_ret, - int flags); -PyAPI_FUNC(node *) PyParser_ParseStringFlagsFilenameEx( - const char *s, - const char *filename, /* decoded from the filesystem encoding */ - grammar *g, - int start, - perrdetail *err_ret, - int *flags); -PyAPI_FUNC(node *) PyParser_ParseStringObject( - const char *s, - PyObject *filename, - grammar *g, - int start, - perrdetail *err_ret, - int *flags); - -/* Note that the following functions are defined in pythonrun.c, - not in parsetok.c */ -PyAPI_FUNC(void) PyParser_SetError(perrdetail *); -PyAPI_FUNC(void) PyParser_ClearError(perrdetail *); - -#ifdef __cplusplus -} -#endif -#endif /* !Py_PARSETOK_H */ -#endif /* !Py_LIMITED_API */ diff --git a/Lib/symbol.py b/Lib/symbol.py deleted file mode 100644 index aaac8c914431b..0000000000000 --- a/Lib/symbol.py +++ /dev/null @@ -1,122 +0,0 @@ -"""Non-terminal symbols of Python grammar (from "graminit.h").""" - -# This file is automatically generated; please don't muck it up! 
-# -# To update the symbols in this file, 'cd' to the top directory of -# the python source tree after building the interpreter and run: -# -# python3 Tools/scripts/generate_symbol_py.py Include/graminit.h Lib/symbol.py -# -# or just -# -# make regen-symbol - -import warnings - -warnings.warn( - "The symbol module is deprecated and will be removed " - "in future versions of Python", - DeprecationWarning, - stacklevel=2, -) - -#--start constants-- -single_input = 256 -file_input = 257 -eval_input = 258 -decorator = 259 -decorators = 260 -decorated = 261 -async_funcdef = 262 -funcdef = 263 -parameters = 264 -typedargslist = 265 -tfpdef = 266 -varargslist = 267 -vfpdef = 268 -stmt = 269 -simple_stmt = 270 -small_stmt = 271 -expr_stmt = 272 -annassign = 273 -testlist_star_expr = 274 -augassign = 275 -del_stmt = 276 -pass_stmt = 277 -flow_stmt = 278 -break_stmt = 279 -continue_stmt = 280 -return_stmt = 281 -yield_stmt = 282 -raise_stmt = 283 -import_stmt = 284 -import_name = 285 -import_from = 286 -import_as_name = 287 -dotted_as_name = 288 -import_as_names = 289 -dotted_as_names = 290 -dotted_name = 291 -global_stmt = 292 -nonlocal_stmt = 293 -assert_stmt = 294 -compound_stmt = 295 -async_stmt = 296 -if_stmt = 297 -while_stmt = 298 -for_stmt = 299 -try_stmt = 300 -with_stmt = 301 -with_item = 302 -except_clause = 303 -suite = 304 -namedexpr_test = 305 -test = 306 -test_nocond = 307 -lambdef = 308 -lambdef_nocond = 309 -or_test = 310 -and_test = 311 -not_test = 312 -comparison = 313 -comp_op = 314 -star_expr = 315 -expr = 316 -xor_expr = 317 -and_expr = 318 -shift_expr = 319 -arith_expr = 320 -term = 321 -factor = 322 -power = 323 -atom_expr = 324 -atom = 325 -testlist_comp = 326 -trailer = 327 -subscriptlist = 328 -subscript = 329 -sliceop = 330 -exprlist = 331 -testlist = 332 -dictorsetmaker = 333 -classdef = 334 -arglist = 335 -argument = 336 -comp_iter = 337 -sync_comp_for = 338 -comp_for = 339 -comp_if = 340 -encoding_decl = 341 -yield_expr = 342 -yield_arg = 343 -func_body_suite = 344 -func_type_input = 345 -func_type = 346 -typelist = 347 -#--end constants-- - -sym_name = {} -for _name, _value in list(globals().items()): - if type(_value) is type(0): - sym_name[_value] = _name -del _name, _value diff --git a/Lib/test/test_symbol.py b/Lib/test/test_symbol.py deleted file mode 100644 index 645d8f43b6cd9..0000000000000 --- a/Lib/test/test_symbol.py +++ /dev/null @@ -1,58 +0,0 @@ -import unittest -from test import support -import os -import sys -import sysconfig -import subprocess - - -SYMBOL_FILE = support.findfile('symbol.py') -GEN_SYMBOL_FILE = os.path.join(os.path.dirname(__file__), - '..', '..', 'Tools', 'scripts', - 'generate_symbol_py.py') -GRAMMAR_FILE = os.path.join(os.path.dirname(__file__), - '..', '..', 'Include', 'graminit.h') -TEST_PY_FILE = 'symbol_test.py' - - -class TestSymbolGeneration(unittest.TestCase): - - def _copy_file_without_generated_symbols(self, source_file, dest_file): - with open(source_file) as fp: - lines = fp.readlines() - with open(dest_file, 'w') as fp: - fp.writelines(lines[:lines.index("#--start constants--\n") + 1]) - fp.writelines(lines[lines.index("#--end constants--\n"):]) - - def _generate_symbols(self, grammar_file, target_symbol_py_file): - proc = subprocess.Popen([sys.executable, - GEN_SYMBOL_FILE, - grammar_file, - target_symbol_py_file], stderr=subprocess.PIPE) - stderr = proc.communicate()[1] - return proc.returncode, stderr - - def compare_files(self, file1, file2): - with open(file1) as fp: - lines1 = fp.readlines() - with open(file2) as fp: 
- lines2 = fp.readlines() - self.assertEqual(lines1, lines2) - - @unittest.skipUnless(sysconfig.is_python_build(), - 'test only works from source build directory') - def test_real_grammar_and_symbol_file(self): - output = support.TESTFN - self.addCleanup(support.unlink, output) - - self._copy_file_without_generated_symbols(SYMBOL_FILE, output) - - exitcode, stderr = self._generate_symbols(GRAMMAR_FILE, output) - self.assertEqual(b'', stderr) - self.assertEqual(0, exitcode) - - self.compare_files(SYMBOL_FILE, output) - - -if __name__ == "__main__": - unittest.main() diff --git a/Makefile.pre.in b/Makefile.pre.in index 684171217167b..fc6dc434e0a18 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -344,7 +344,6 @@ PYTHON_OBJS= \ Python/getcopyright.o \ Python/getplatform.o \ Python/getversion.o \ - Python/graminit.o \ Python/hamt.o \ Python/hashtable.o \ Python/import.o \ @@ -742,7 +741,7 @@ regen-importlib: Programs/_freeze_importlib # Regenerate all generated files regen-all: regen-opcode regen-opcode-targets regen-typeslots \ - regen-token regen-symbol regen-ast regen-importlib clinic \ + regen-token regen-ast regen-importlib clinic \ regen-pegen-metaparser regen-pegen ############################################################################ @@ -881,15 +880,7 @@ regen-keyword: $(srcdir)/Lib/keyword.py.new $(UPDATE_FILE) $(srcdir)/Lib/keyword.py $(srcdir)/Lib/keyword.py.new -.PHONY: regen-symbol -regen-symbol: $(srcdir)/Include/graminit.h - # Regenerate Lib/symbol.py from Include/graminit.h - # using Tools/scripts/generate_symbol_py.py - $(PYTHON_FOR_REGEN) $(srcdir)/Tools/scripts/generate_symbol_py.py \ - $(srcdir)/Include/graminit.h \ - $(srcdir)/Lib/symbol.py - -Python/compile.o Python/symtable.o Python/ast_unparse.o Python/ast.o Python/future.o Parser/parsetok.o: $(srcdir)/Include/graminit.h $(srcdir)/Include/Python-ast.h +Python/compile.o Python/symtable.o Python/ast_unparse.o Python/ast.o Python/future.o: $(srcdir)/Include/Python-ast.h Python/getplatform.o: $(srcdir)/Python/getplatform.c $(CC) -c $(PY_CORE_CFLAGS) -DPLATFORM='"$(MACHDEP)"' -o $@ $(srcdir)/Python/getplatform.c @@ -989,7 +980,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/abstract.h \ $(srcdir)/Include/asdl.h \ $(srcdir)/Include/ast.h \ - $(srcdir)/Include/bitset.h \ $(srcdir)/Include/bltinmodule.h \ $(srcdir)/Include/boolobject.h \ $(srcdir)/Include/bytearrayobject.h \ @@ -1027,7 +1017,6 @@ PYTHON_HEADERS= \ $(srcdir)/Include/modsupport.h \ $(srcdir)/Include/moduleobject.h \ $(srcdir)/Include/namespaceobject.h \ - $(srcdir)/Include/node.h \ $(srcdir)/Include/object.h \ $(srcdir)/Include/objimpl.h \ $(srcdir)/Include/odictobject.h \ diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-16-59-02.bpo-40939.6810Ak.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-16-59-02.bpo-40939.6810Ak.rst new file mode 100644 index 0000000000000..8a626d479a91c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-16-59-02.bpo-40939.6810Ak.rst @@ -0,0 +1 @@ +Remove the remaining files from the old parser and the :mod:`symbol` module. 
\ No newline at end of file diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 2653ce9111d18..0f9110e08d65b 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -115,7 +115,6 @@ - @@ -162,8 +161,6 @@ - - @@ -209,14 +206,12 @@ - - @@ -451,7 +446,6 @@ - diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 6ea7afaa03681..12f05acc3a74a 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -42,9 +42,6 @@ Include - - Include - Include @@ -183,12 +180,6 @@ Include - - Include - - - Include - Include @@ -318,9 +309,6 @@ Include - - Include - Include @@ -336,9 +324,6 @@ Include - - Include - Include @@ -1010,9 +995,6 @@ Python - - Python - Python diff --git a/PCbuild/regen.vcxproj b/PCbuild/regen.vcxproj index ea246ffc8ff25..90d6dc68d5404 100644 --- a/PCbuild/regen.vcxproj +++ b/PCbuild/regen.vcxproj @@ -133,10 +133,6 @@ - - - - @@ -202,18 +198,12 @@ - - - - - - diff --git a/Parser/node.c b/Parser/node.c deleted file mode 100644 index 8789e01e9b848..0000000000000 --- a/Parser/node.c +++ /dev/null @@ -1,189 +0,0 @@ -/* Parse tree node implementation */ - -#include "Python.h" -#include "node.h" -#include "errcode.h" - -node * -PyNode_New(int type) -{ - node *n = (node *) PyObject_MALLOC(1 * sizeof(node)); - if (n == NULL) - return NULL; - n->n_type = type; - n->n_str = NULL; - n->n_lineno = 0; - n->n_end_lineno = 0; - n->n_col_offset = 0; - n->n_end_col_offset = -1; - n->n_nchildren = 0; - n->n_child = NULL; - return n; -} - -/* See comments at XXXROUNDUP below. Returns -1 on overflow. */ -static int -fancy_roundup(int n) -{ - /* Round up to the closest power of 2 >= n. */ - int result = 256; - assert(n > 128); - while (result < n) { - result <<= 1; - if (result <= 0) - return -1; - } - return result; -} - -/* A gimmick to make massive numbers of reallocs quicker. The result is - * a number >= the input. In PyNode_AddChild, it's used like so, when - * we're about to add child number current_size + 1: - * - * if XXXROUNDUP(current_size) < XXXROUNDUP(current_size + 1): - * allocate space for XXXROUNDUP(current_size + 1) total children - * else: - * we already have enough space - * - * Since a node starts out empty, we must have - * - * XXXROUNDUP(0) < XXXROUNDUP(1) - * - * so that we allocate space for the first child. One-child nodes are very - * common (presumably that would change if we used a more abstract form - * of syntax tree), so to avoid wasting memory it's desirable that - * XXXROUNDUP(1) == 1. That in turn forces XXXROUNDUP(0) == 0. - * - * Else for 2 <= n <= 128, we round up to the closest multiple of 4. Why 4? - * Rounding up to a multiple of an exact power of 2 is very efficient, and - * most nodes with more than one child have <= 4 kids. - * - * Else we call fancy_roundup() to grow proportionately to n. We've got an - * extreme case then (like test_longexp.py), and on many platforms doing - * anything less than proportional growth leads to exorbitant runtime - * (e.g., MacPython), or extreme fragmentation of user address space (e.g., - * Win98). - * - * In a run of compileall across the 2.3a0 Lib directory, Andrew MacIntyre - * reported that, with this scheme, 89% of PyObject_REALLOC calls in - * PyNode_AddChild passed 1 for the size, and 9% passed 4. So this usually - * wastes very little memory, but is very effective at sidestepping - * platform-realloc disasters on vulnerable platforms. 
- * - * Note that this would be straightforward if a node stored its current - * capacity. The code is tricky to avoid that. - */ -#define XXXROUNDUP(n) ((n) <= 1 ? (n) : \ - (n) <= 128 ? (int)_Py_SIZE_ROUND_UP((n), 4) : \ - fancy_roundup(n)) - - -void -_PyNode_FinalizeEndPos(node *n) -{ - int nch = NCH(n); - node *last; - if (nch == 0) { - return; - } - last = CHILD(n, nch - 1); - _PyNode_FinalizeEndPos(last); - n->n_end_lineno = last->n_end_lineno; - n->n_end_col_offset = last->n_end_col_offset; -} - -int -PyNode_AddChild(node *n1, int type, char *str, int lineno, int col_offset, - int end_lineno, int end_col_offset) -{ - const int nch = n1->n_nchildren; - int current_capacity; - int required_capacity; - node *n; - - // finalize end position of previous node (if any) - if (nch > 0) { - _PyNode_FinalizeEndPos(CHILD(n1, nch - 1)); - } - - if (nch == INT_MAX || nch < 0) - return E_OVERFLOW; - - current_capacity = XXXROUNDUP(nch); - required_capacity = XXXROUNDUP(nch + 1); - if (current_capacity < 0 || required_capacity < 0) - return E_OVERFLOW; - if (current_capacity < required_capacity) { - if ((size_t)required_capacity > SIZE_MAX / sizeof(node)) { - return E_NOMEM; - } - n = n1->n_child; - n = (node *) PyObject_REALLOC(n, - required_capacity * sizeof(node)); - if (n == NULL) - return E_NOMEM; - n1->n_child = n; - } - - n = &n1->n_child[n1->n_nchildren++]; - n->n_type = type; - n->n_str = str; - n->n_lineno = lineno; - n->n_col_offset = col_offset; - n->n_end_lineno = end_lineno; // this and below will be updates after all children are added. - n->n_end_col_offset = end_col_offset; - n->n_nchildren = 0; - n->n_child = NULL; - return 0; -} - -/* Forward */ -static void freechildren(node *); -static Py_ssize_t sizeofchildren(node *n); - - -void -PyNode_Free(node *n) -{ - if (n != NULL) { - freechildren(n); - PyObject_FREE(n); - } -} - -Py_ssize_t -_PyNode_SizeOf(node *n) -{ - Py_ssize_t res = 0; - - if (n != NULL) - res = sizeof(node) + sizeofchildren(n); - return res; -} - -static void -freechildren(node *n) -{ - int i; - for (i = NCH(n); --i >= 0; ) - freechildren(CHILD(n, i)); - if (n->n_child != NULL) - PyObject_FREE(n->n_child); - if (STR(n) != NULL) - PyObject_FREE(STR(n)); -} - -static Py_ssize_t -sizeofchildren(node *n) -{ - Py_ssize_t res = 0; - int i; - for (i = NCH(n); --i >= 0; ) - res += sizeofchildren(CHILD(n, i)); - if (n->n_child != NULL) - /* allocated size of n->n_child array */ - res += XXXROUNDUP(NCH(n)) * sizeof(node); - if (STR(n) != NULL) - res += strlen(STR(n)) + 1; - return res; -} diff --git a/Python/ast.c b/Python/ast.c index d7feb8ce852fd..7bf66e50aa14d 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -5,7 +5,6 @@ */ #include "Python.h" #include "Python-ast.h" -#include "node.h" #include "ast.h" #include "token.h" #include "pythonrun.h" diff --git a/Python/future.c b/Python/future.c index 1663a38a6fdb3..56da4d8c798b8 100644 --- a/Python/future.c +++ b/Python/future.c @@ -1,8 +1,6 @@ #include "Python.h" #include "Python-ast.h" -#include "node.h" #include "token.h" -#include "graminit.h" #include "code.h" #include "symtable.h" #include "ast.h" diff --git a/Python/graminit.c b/Python/graminit.c deleted file mode 100644 index b7aa52895f8ae..0000000000000 --- a/Python/graminit.c +++ /dev/null @@ -1,2688 +0,0 @@ -/* Generated by Parser/pgen */ - -#include "exports.h" -#include "grammar.h" -Py_EXPORTED_SYMBOL grammar _PyParser_Grammar; -static const arc arcs_0_0[3] = { - {2, 1}, - {3, 2}, - {4, 1}, -}; -static const arc arcs_0_1[1] = { - {0, 1}, -}; -static const arc 
arcs_0_2[1] = { - {2, 1}, -}; -static state states_0[3] = { - {3, arcs_0_0}, - {1, arcs_0_1}, - {1, arcs_0_2}, -}; -static const arc arcs_1_0[3] = { - {44, 1}, - {2, 0}, - {45, 0}, -}; -static const arc arcs_1_1[1] = { - {0, 1}, -}; -static state states_1[2] = { - {3, arcs_1_0}, - {1, arcs_1_1}, -}; -static const arc arcs_2_0[1] = { - {47, 1}, -}; -static const arc arcs_2_1[2] = { - {44, 2}, - {2, 1}, -}; -static const arc arcs_2_2[1] = { - {0, 2}, -}; -static state states_2[3] = { - {1, arcs_2_0}, - {2, arcs_2_1}, - {1, arcs_2_2}, -}; -static const arc arcs_3_0[1] = { - {10, 1}, -}; -static const arc arcs_3_1[1] = { - {49, 2}, -}; -static const arc arcs_3_2[1] = { - {2, 3}, -}; -static const arc arcs_3_3[1] = { - {0, 3}, -}; -static state states_3[4] = { - {1, arcs_3_0}, - {1, arcs_3_1}, - {1, arcs_3_2}, - {1, arcs_3_3}, -}; -static const arc arcs_4_0[1] = { - {48, 1}, -}; -static const arc arcs_4_1[2] = { - {48, 1}, - {0, 1}, -}; -static state states_4[2] = { - {1, arcs_4_0}, - {2, arcs_4_1}, -}; -static const arc arcs_5_0[1] = { - {50, 1}, -}; -static const arc arcs_5_1[3] = { - {52, 2}, - {53, 2}, - {54, 2}, -}; -static const arc arcs_5_2[1] = { - {0, 2}, -}; -static state states_5[3] = { - {1, arcs_5_0}, - {3, arcs_5_1}, - {1, arcs_5_2}, -}; -static const arc arcs_6_0[1] = { - {38, 1}, -}; -static const arc arcs_6_1[1] = { - {54, 2}, -}; -static const arc arcs_6_2[1] = { - {0, 2}, -}; -static state states_6[3] = { - {1, arcs_6_0}, - {1, arcs_6_1}, - {1, arcs_6_2}, -}; -static const arc arcs_7_0[1] = { - {19, 1}, -}; -static const arc arcs_7_1[1] = { - {40, 2}, -}; -static const arc arcs_7_2[1] = { - {55, 3}, -}; -static const arc arcs_7_3[2] = { - {56, 4}, - {57, 5}, -}; -static const arc arcs_7_4[1] = { - {58, 6}, -}; -static const arc arcs_7_5[2] = { - {59, 7}, - {60, 8}, -}; -static const arc arcs_7_6[1] = { - {57, 5}, -}; -static const arc arcs_7_7[1] = { - {60, 8}, -}; -static const arc arcs_7_8[1] = { - {0, 8}, -}; -static state states_7[9] = { - {1, arcs_7_0}, - {1, arcs_7_1}, - {1, arcs_7_2}, - {2, arcs_7_3}, - {1, arcs_7_4}, - {2, arcs_7_5}, - {1, arcs_7_6}, - {1, arcs_7_7}, - {1, arcs_7_8}, -}; -static const arc arcs_8_0[1] = { - {5, 1}, -}; -static const arc arcs_8_1[2] = { - {61, 2}, - {62, 3}, -}; -static const arc arcs_8_2[1] = { - {0, 2}, -}; -static const arc arcs_8_3[1] = { - {61, 2}, -}; -static state states_8[4] = { - {1, arcs_8_0}, - {2, arcs_8_1}, - {1, arcs_8_2}, - {1, arcs_8_3}, -}; -static const arc arcs_9_0[3] = { - {6, 1}, - {63, 2}, - {64, 3}, -}; -static const arc arcs_9_1[4] = { - {65, 4}, - {59, 5}, - {64, 6}, - {0, 1}, -}; -static const arc arcs_9_2[1] = { - {64, 7}, -}; -static const arc arcs_9_3[4] = { - {65, 8}, - {66, 9}, - {59, 5}, - {0, 3}, -}; -static const arc arcs_9_4[4] = { - {63, 2}, - {59, 10}, - {64, 11}, - {0, 4}, -}; -static const arc arcs_9_5[1] = { - {0, 5}, -}; -static const arc arcs_9_6[3] = { - {65, 4}, - {59, 5}, - {0, 6}, -}; -static const arc arcs_9_7[3] = { - {65, 12}, - {59, 5}, - {0, 7}, -}; -static const arc arcs_9_8[6] = { - {6, 13}, - {63, 2}, - {67, 14}, - {59, 15}, - {64, 3}, - {0, 8}, -}; -static const arc arcs_9_9[1] = { - {58, 16}, -}; -static const arc arcs_9_10[3] = { - {63, 2}, - {64, 11}, - {0, 10}, -}; -static const arc arcs_9_11[4] = { - {65, 4}, - {66, 17}, - {59, 5}, - {0, 11}, -}; -static const arc arcs_9_12[2] = { - {59, 5}, - {0, 12}, -}; -static const arc arcs_9_13[4] = { - {65, 18}, - {59, 5}, - {64, 19}, - {0, 13}, -}; -static const arc arcs_9_14[2] = { - {65, 20}, - {0, 14}, -}; -static const arc 
arcs_9_15[5] = { - {6, 13}, - {63, 2}, - {67, 14}, - {64, 3}, - {0, 15}, -}; -static const arc arcs_9_16[3] = { - {65, 8}, - {59, 5}, - {0, 16}, -}; -static const arc arcs_9_17[1] = { - {58, 6}, -}; -static const arc arcs_9_18[4] = { - {63, 2}, - {59, 21}, - {64, 22}, - {0, 18}, -}; -static const arc arcs_9_19[3] = { - {65, 18}, - {59, 5}, - {0, 19}, -}; -static const arc arcs_9_20[5] = { - {6, 23}, - {63, 2}, - {59, 24}, - {64, 25}, - {0, 20}, -}; -static const arc arcs_9_21[3] = { - {63, 2}, - {64, 22}, - {0, 21}, -}; -static const arc arcs_9_22[4] = { - {65, 18}, - {66, 26}, - {59, 5}, - {0, 22}, -}; -static const arc arcs_9_23[4] = { - {65, 27}, - {59, 5}, - {64, 28}, - {0, 23}, -}; -static const arc arcs_9_24[1] = { - {64, 25}, -}; -static const arc arcs_9_25[4] = { - {65, 29}, - {66, 30}, - {59, 5}, - {0, 25}, -}; -static const arc arcs_9_26[1] = { - {58, 19}, -}; -static const arc arcs_9_27[4] = { - {63, 2}, - {59, 31}, - {64, 32}, - {0, 27}, -}; -static const arc arcs_9_28[3] = { - {65, 27}, - {59, 5}, - {0, 28}, -}; -static const arc arcs_9_29[5] = { - {6, 33}, - {63, 2}, - {59, 34}, - {64, 25}, - {0, 29}, -}; -static const arc arcs_9_30[1] = { - {58, 35}, -}; -static const arc arcs_9_31[3] = { - {63, 2}, - {64, 32}, - {0, 31}, -}; -static const arc arcs_9_32[4] = { - {65, 27}, - {66, 36}, - {59, 5}, - {0, 32}, -}; -static const arc arcs_9_33[4] = { - {65, 37}, - {59, 5}, - {64, 38}, - {0, 33}, -}; -static const arc arcs_9_34[4] = { - {6, 33}, - {63, 2}, - {64, 25}, - {0, 34}, -}; -static const arc arcs_9_35[3] = { - {65, 29}, - {59, 5}, - {0, 35}, -}; -static const arc arcs_9_36[1] = { - {58, 28}, -}; -static const arc arcs_9_37[4] = { - {63, 2}, - {59, 39}, - {64, 40}, - {0, 37}, -}; -static const arc arcs_9_38[3] = { - {65, 37}, - {59, 5}, - {0, 38}, -}; -static const arc arcs_9_39[3] = { - {63, 2}, - {64, 40}, - {0, 39}, -}; -static const arc arcs_9_40[4] = { - {65, 37}, - {66, 41}, - {59, 5}, - {0, 40}, -}; -static const arc arcs_9_41[1] = { - {58, 38}, -}; -static state states_9[42] = { - {3, arcs_9_0}, - {4, arcs_9_1}, - {1, arcs_9_2}, - {4, arcs_9_3}, - {4, arcs_9_4}, - {1, arcs_9_5}, - {3, arcs_9_6}, - {3, arcs_9_7}, - {6, arcs_9_8}, - {1, arcs_9_9}, - {3, arcs_9_10}, - {4, arcs_9_11}, - {2, arcs_9_12}, - {4, arcs_9_13}, - {2, arcs_9_14}, - {5, arcs_9_15}, - {3, arcs_9_16}, - {1, arcs_9_17}, - {4, arcs_9_18}, - {3, arcs_9_19}, - {5, arcs_9_20}, - {3, arcs_9_21}, - {4, arcs_9_22}, - {4, arcs_9_23}, - {1, arcs_9_24}, - {4, arcs_9_25}, - {1, arcs_9_26}, - {4, arcs_9_27}, - {3, arcs_9_28}, - {5, arcs_9_29}, - {1, arcs_9_30}, - {3, arcs_9_31}, - {4, arcs_9_32}, - {4, arcs_9_33}, - {4, arcs_9_34}, - {3, arcs_9_35}, - {1, arcs_9_36}, - {4, arcs_9_37}, - {3, arcs_9_38}, - {3, arcs_9_39}, - {4, arcs_9_40}, - {1, arcs_9_41}, -}; -static const arc arcs_10_0[1] = { - {40, 1}, -}; -static const arc arcs_10_1[2] = { - {57, 2}, - {0, 1}, -}; -static const arc arcs_10_2[1] = { - {58, 3}, -}; -static const arc arcs_10_3[1] = { - {0, 3}, -}; -static state states_10[4] = { - {1, arcs_10_0}, - {2, arcs_10_1}, - {1, arcs_10_2}, - {1, arcs_10_3}, -}; -static const arc arcs_11_0[3] = { - {6, 1}, - {63, 2}, - {69, 3}, -}; -static const arc arcs_11_1[3] = { - {65, 4}, - {69, 5}, - {0, 1}, -}; -static const arc arcs_11_2[1] = { - {69, 6}, -}; -static const arc arcs_11_3[3] = { - {65, 7}, - {66, 8}, - {0, 3}, -}; -static const arc arcs_11_4[3] = { - {63, 2}, - {69, 9}, - {0, 4}, -}; -static const arc arcs_11_5[2] = { - {65, 4}, - {0, 5}, -}; -static const arc arcs_11_6[2] = { - {65, 10}, - {0, 6}, 
-}; -static const arc arcs_11_7[5] = { - {6, 11}, - {63, 2}, - {67, 12}, - {69, 3}, - {0, 7}, -}; -static const arc arcs_11_8[1] = { - {58, 13}, -}; -static const arc arcs_11_9[3] = { - {65, 4}, - {66, 14}, - {0, 9}, -}; -static const arc arcs_11_10[1] = { - {0, 10}, -}; -static const arc arcs_11_11[3] = { - {65, 15}, - {69, 16}, - {0, 11}, -}; -static const arc arcs_11_12[2] = { - {65, 17}, - {0, 12}, -}; -static const arc arcs_11_13[2] = { - {65, 7}, - {0, 13}, -}; -static const arc arcs_11_14[1] = { - {58, 5}, -}; -static const arc arcs_11_15[3] = { - {63, 2}, - {69, 18}, - {0, 15}, -}; -static const arc arcs_11_16[2] = { - {65, 15}, - {0, 16}, -}; -static const arc arcs_11_17[4] = { - {6, 19}, - {63, 2}, - {69, 20}, - {0, 17}, -}; -static const arc arcs_11_18[3] = { - {65, 15}, - {66, 21}, - {0, 18}, -}; -static const arc arcs_11_19[3] = { - {65, 22}, - {69, 23}, - {0, 19}, -}; -static const arc arcs_11_20[3] = { - {65, 24}, - {66, 25}, - {0, 20}, -}; -static const arc arcs_11_21[1] = { - {58, 16}, -}; -static const arc arcs_11_22[3] = { - {63, 2}, - {69, 26}, - {0, 22}, -}; -static const arc arcs_11_23[2] = { - {65, 22}, - {0, 23}, -}; -static const arc arcs_11_24[4] = { - {6, 27}, - {63, 2}, - {69, 20}, - {0, 24}, -}; -static const arc arcs_11_25[1] = { - {58, 28}, -}; -static const arc arcs_11_26[3] = { - {65, 22}, - {66, 29}, - {0, 26}, -}; -static const arc arcs_11_27[3] = { - {65, 30}, - {69, 31}, - {0, 27}, -}; -static const arc arcs_11_28[2] = { - {65, 24}, - {0, 28}, -}; -static const arc arcs_11_29[1] = { - {58, 23}, -}; -static const arc arcs_11_30[3] = { - {63, 2}, - {69, 32}, - {0, 30}, -}; -static const arc arcs_11_31[2] = { - {65, 30}, - {0, 31}, -}; -static const arc arcs_11_32[3] = { - {65, 30}, - {66, 33}, - {0, 32}, -}; -static const arc arcs_11_33[1] = { - {58, 31}, -}; -static state states_11[34] = { - {3, arcs_11_0}, - {3, arcs_11_1}, - {1, arcs_11_2}, - {3, arcs_11_3}, - {3, arcs_11_4}, - {2, arcs_11_5}, - {2, arcs_11_6}, - {5, arcs_11_7}, - {1, arcs_11_8}, - {3, arcs_11_9}, - {1, arcs_11_10}, - {3, arcs_11_11}, - {2, arcs_11_12}, - {2, arcs_11_13}, - {1, arcs_11_14}, - {3, arcs_11_15}, - {2, arcs_11_16}, - {4, arcs_11_17}, - {3, arcs_11_18}, - {3, arcs_11_19}, - {3, arcs_11_20}, - {1, arcs_11_21}, - {3, arcs_11_22}, - {2, arcs_11_23}, - {4, arcs_11_24}, - {1, arcs_11_25}, - {3, arcs_11_26}, - {3, arcs_11_27}, - {2, arcs_11_28}, - {1, arcs_11_29}, - {3, arcs_11_30}, - {2, arcs_11_31}, - {3, arcs_11_32}, - {1, arcs_11_33}, -}; -static const arc arcs_12_0[1] = { - {40, 1}, -}; -static const arc arcs_12_1[1] = { - {0, 1}, -}; -static state states_12[2] = { - {1, arcs_12_0}, - {1, arcs_12_1}, -}; -static const arc arcs_13_0[2] = { - {3, 1}, - {4, 1}, -}; -static const arc arcs_13_1[1] = { - {0, 1}, -}; -static state states_13[2] = { - {2, arcs_13_0}, - {1, arcs_13_1}, -}; -static const arc arcs_14_0[1] = { - {70, 1}, -}; -static const arc arcs_14_1[2] = { - {71, 2}, - {2, 3}, -}; -static const arc arcs_14_2[2] = { - {2, 3}, - {70, 1}, -}; -static const arc arcs_14_3[1] = { - {0, 3}, -}; -static state states_14[4] = { - {1, arcs_14_0}, - {2, arcs_14_1}, - {2, arcs_14_2}, - {1, arcs_14_3}, -}; -static const arc arcs_15_0[8] = { - {72, 1}, - {73, 1}, - {74, 1}, - {75, 1}, - {76, 1}, - {77, 1}, - {78, 1}, - {79, 1}, -}; -static const arc arcs_15_1[1] = { - {0, 1}, -}; -static state states_15[2] = { - {8, arcs_15_0}, - {1, arcs_15_1}, -}; -static const arc arcs_16_0[1] = { - {80, 1}, -}; -static const arc arcs_16_1[4] = { - {66, 2}, - {81, 3}, - {82, 4}, - {0, 1}, -}; 
-static const arc arcs_16_2[2] = { - {80, 5}, - {83, 5}, -}; -static const arc arcs_16_3[1] = { - {0, 3}, -}; -static const arc arcs_16_4[2] = { - {47, 3}, - {83, 3}, -}; -static const arc arcs_16_5[3] = { - {66, 2}, - {59, 3}, - {0, 5}, -}; -static state states_16[6] = { - {1, arcs_16_0}, - {4, arcs_16_1}, - {2, arcs_16_2}, - {1, arcs_16_3}, - {2, arcs_16_4}, - {3, arcs_16_5}, -}; -static const arc arcs_17_0[1] = { - {57, 1}, -}; -static const arc arcs_17_1[1] = { - {58, 2}, -}; -static const arc arcs_17_2[2] = { - {66, 3}, - {0, 2}, -}; -static const arc arcs_17_3[2] = { - {80, 4}, - {83, 4}, -}; -static const arc arcs_17_4[1] = { - {0, 4}, -}; -static state states_17[5] = { - {1, arcs_17_0}, - {1, arcs_17_1}, - {2, arcs_17_2}, - {2, arcs_17_3}, - {1, arcs_17_4}, -}; -static const arc arcs_18_0[2] = { - {84, 1}, - {58, 1}, -}; -static const arc arcs_18_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_18_2[3] = { - {84, 1}, - {58, 1}, - {0, 2}, -}; -static state states_18[3] = { - {2, arcs_18_0}, - {2, arcs_18_1}, - {3, arcs_18_2}, -}; -static const arc arcs_19_0[13] = { - {85, 1}, - {86, 1}, - {87, 1}, - {88, 1}, - {89, 1}, - {90, 1}, - {91, 1}, - {92, 1}, - {93, 1}, - {94, 1}, - {95, 1}, - {96, 1}, - {97, 1}, -}; -static const arc arcs_19_1[1] = { - {0, 1}, -}; -static state states_19[2] = { - {13, arcs_19_0}, - {1, arcs_19_1}, -}; -static const arc arcs_20_0[1] = { - {20, 1}, -}; -static const arc arcs_20_1[1] = { - {98, 2}, -}; -static const arc arcs_20_2[1] = { - {0, 2}, -}; -static state states_20[3] = { - {1, arcs_20_0}, - {1, arcs_20_1}, - {1, arcs_20_2}, -}; -static const arc arcs_21_0[1] = { - {29, 1}, -}; -static const arc arcs_21_1[1] = { - {0, 1}, -}; -static state states_21[2] = { - {1, arcs_21_0}, - {1, arcs_21_1}, -}; -static const arc arcs_22_0[5] = { - {99, 1}, - {100, 1}, - {101, 1}, - {102, 1}, - {103, 1}, -}; -static const arc arcs_22_1[1] = { - {0, 1}, -}; -static state states_22[2] = { - {5, arcs_22_0}, - {1, arcs_22_1}, -}; -static const arc arcs_23_0[1] = { - {16, 1}, -}; -static const arc arcs_23_1[1] = { - {0, 1}, -}; -static state states_23[2] = { - {1, arcs_23_0}, - {1, arcs_23_1}, -}; -static const arc arcs_24_0[1] = { - {18, 1}, -}; -static const arc arcs_24_1[1] = { - {0, 1}, -}; -static state states_24[2] = { - {1, arcs_24_0}, - {1, arcs_24_1}, -}; -static const arc arcs_25_0[1] = { - {31, 1}, -}; -static const arc arcs_25_1[2] = { - {80, 2}, - {0, 1}, -}; -static const arc arcs_25_2[1] = { - {0, 2}, -}; -static state states_25[3] = { - {1, arcs_25_0}, - {2, arcs_25_1}, - {1, arcs_25_2}, -}; -static const arc arcs_26_0[1] = { - {83, 1}, -}; -static const arc arcs_26_1[1] = { - {0, 1}, -}; -static state states_26[2] = { - {1, arcs_26_0}, - {1, arcs_26_1}, -}; -static const arc arcs_27_0[1] = { - {30, 1}, -}; -static const arc arcs_27_1[2] = { - {58, 2}, - {0, 1}, -}; -static const arc arcs_27_2[2] = { - {22, 3}, - {0, 2}, -}; -static const arc arcs_27_3[1] = { - {58, 4}, -}; -static const arc arcs_27_4[1] = { - {0, 4}, -}; -static state states_27[5] = { - {1, arcs_27_0}, - {2, arcs_27_1}, - {2, arcs_27_2}, - {1, arcs_27_3}, - {1, arcs_27_4}, -}; -static const arc arcs_28_0[2] = { - {104, 1}, - {105, 1}, -}; -static const arc arcs_28_1[1] = { - {0, 1}, -}; -static state states_28[2] = { - {2, arcs_28_0}, - {1, arcs_28_1}, -}; -static const arc arcs_29_0[1] = { - {25, 1}, -}; -static const arc arcs_29_1[1] = { - {106, 2}, -}; -static const arc arcs_29_2[1] = { - {0, 2}, -}; -static state states_29[3] = { - {1, arcs_29_0}, - {1, arcs_29_1}, - {1, 
arcs_29_2}, -}; -static const arc arcs_30_0[1] = { - {22, 1}, -}; -static const arc arcs_30_1[3] = { - {107, 2}, - {9, 2}, - {108, 3}, -}; -static const arc arcs_30_2[4] = { - {107, 2}, - {9, 2}, - {25, 4}, - {108, 3}, -}; -static const arc arcs_30_3[1] = { - {25, 4}, -}; -static const arc arcs_30_4[3] = { - {5, 5}, - {6, 6}, - {109, 6}, -}; -static const arc arcs_30_5[1] = { - {109, 7}, -}; -static const arc arcs_30_6[1] = { - {0, 6}, -}; -static const arc arcs_30_7[1] = { - {61, 6}, -}; -static state states_30[8] = { - {1, arcs_30_0}, - {3, arcs_30_1}, - {4, arcs_30_2}, - {1, arcs_30_3}, - {3, arcs_30_4}, - {1, arcs_30_5}, - {1, arcs_30_6}, - {1, arcs_30_7}, -}; -static const arc arcs_31_0[1] = { - {40, 1}, -}; -static const arc arcs_31_1[2] = { - {111, 2}, - {0, 1}, -}; -static const arc arcs_31_2[1] = { - {40, 3}, -}; -static const arc arcs_31_3[1] = { - {0, 3}, -}; -static state states_31[4] = { - {1, arcs_31_0}, - {2, arcs_31_1}, - {1, arcs_31_2}, - {1, arcs_31_3}, -}; -static const arc arcs_32_0[1] = { - {108, 1}, -}; -static const arc arcs_32_1[2] = { - {111, 2}, - {0, 1}, -}; -static const arc arcs_32_2[1] = { - {40, 3}, -}; -static const arc arcs_32_3[1] = { - {0, 3}, -}; -static state states_32[4] = { - {1, arcs_32_0}, - {2, arcs_32_1}, - {1, arcs_32_2}, - {1, arcs_32_3}, -}; -static const arc arcs_33_0[1] = { - {110, 1}, -}; -static const arc arcs_33_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_33_2[2] = { - {110, 1}, - {0, 2}, -}; -static state states_33[3] = { - {1, arcs_33_0}, - {2, arcs_33_1}, - {2, arcs_33_2}, -}; -static const arc arcs_34_0[1] = { - {112, 1}, -}; -static const arc arcs_34_1[2] = { - {65, 0}, - {0, 1}, -}; -static state states_34[2] = { - {1, arcs_34_0}, - {2, arcs_34_1}, -}; -static const arc arcs_35_0[1] = { - {40, 1}, -}; -static const arc arcs_35_1[2] = { - {107, 0}, - {0, 1}, -}; -static state states_35[2] = { - {1, arcs_35_0}, - {2, arcs_35_1}, -}; -static const arc arcs_36_0[1] = { - {23, 1}, -}; -static const arc arcs_36_1[1] = { - {40, 2}, -}; -static const arc arcs_36_2[2] = { - {65, 1}, - {0, 2}, -}; -static state states_36[3] = { - {1, arcs_36_0}, - {1, arcs_36_1}, - {2, arcs_36_2}, -}; -static const arc arcs_37_0[1] = { - {27, 1}, -}; -static const arc arcs_37_1[1] = { - {40, 2}, -}; -static const arc arcs_37_2[2] = { - {65, 1}, - {0, 2}, -}; -static state states_37[3] = { - {1, arcs_37_0}, - {1, arcs_37_1}, - {2, arcs_37_2}, -}; -static const arc arcs_38_0[1] = { - {15, 1}, -}; -static const arc arcs_38_1[1] = { - {58, 2}, -}; -static const arc arcs_38_2[2] = { - {65, 3}, - {0, 2}, -}; -static const arc arcs_38_3[1] = { - {58, 4}, -}; -static const arc arcs_38_4[1] = { - {0, 4}, -}; -static state states_38[5] = { - {1, arcs_38_0}, - {1, arcs_38_1}, - {2, arcs_38_2}, - {1, arcs_38_3}, - {1, arcs_38_4}, -}; -static const arc arcs_39_0[9] = { - {113, 1}, - {53, 1}, - {51, 1}, - {114, 1}, - {54, 1}, - {115, 1}, - {116, 1}, - {117, 1}, - {118, 1}, -}; -static const arc arcs_39_1[1] = { - {0, 1}, -}; -static state states_39[2] = { - {9, arcs_39_0}, - {1, arcs_39_1}, -}; -static const arc arcs_40_0[1] = { - {38, 1}, -}; -static const arc arcs_40_1[3] = { - {114, 2}, - {54, 2}, - {118, 2}, -}; -static const arc arcs_40_2[1] = { - {0, 2}, -}; -static state states_40[3] = { - {1, arcs_40_0}, - {3, arcs_40_1}, - {1, arcs_40_2}, -}; -static const arc arcs_41_0[1] = { - {24, 1}, -}; -static const arc arcs_41_1[1] = { - {49, 2}, -}; -static const arc arcs_41_2[1] = { - {57, 3}, -}; -static const arc arcs_41_3[1] = { - {119, 4}, -}; -static 
const arc arcs_41_4[3] = { - {120, 1}, - {121, 5}, - {0, 4}, -}; -static const arc arcs_41_5[1] = { - {57, 6}, -}; -static const arc arcs_41_6[1] = { - {119, 7}, -}; -static const arc arcs_41_7[1] = { - {0, 7}, -}; -static state states_41[8] = { - {1, arcs_41_0}, - {1, arcs_41_1}, - {1, arcs_41_2}, - {1, arcs_41_3}, - {3, arcs_41_4}, - {1, arcs_41_5}, - {1, arcs_41_6}, - {1, arcs_41_7}, -}; -static const arc arcs_42_0[1] = { - {33, 1}, -}; -static const arc arcs_42_1[1] = { - {49, 2}, -}; -static const arc arcs_42_2[1] = { - {57, 3}, -}; -static const arc arcs_42_3[1] = { - {119, 4}, -}; -static const arc arcs_42_4[2] = { - {121, 5}, - {0, 4}, -}; -static const arc arcs_42_5[1] = { - {57, 6}, -}; -static const arc arcs_42_6[1] = { - {119, 7}, -}; -static const arc arcs_42_7[1] = { - {0, 7}, -}; -static state states_42[8] = { - {1, arcs_42_0}, - {1, arcs_42_1}, - {1, arcs_42_2}, - {1, arcs_42_3}, - {2, arcs_42_4}, - {1, arcs_42_5}, - {1, arcs_42_6}, - {1, arcs_42_7}, -}; -static const arc arcs_43_0[1] = { - {21, 1}, -}; -static const arc arcs_43_1[1] = { - {98, 2}, -}; -static const arc arcs_43_2[1] = { - {122, 3}, -}; -static const arc arcs_43_3[1] = { - {47, 4}, -}; -static const arc arcs_43_4[1] = { - {57, 5}, -}; -static const arc arcs_43_5[2] = { - {59, 6}, - {119, 7}, -}; -static const arc arcs_43_6[1] = { - {119, 7}, -}; -static const arc arcs_43_7[2] = { - {121, 8}, - {0, 7}, -}; -static const arc arcs_43_8[1] = { - {57, 9}, -}; -static const arc arcs_43_9[1] = { - {119, 10}, -}; -static const arc arcs_43_10[1] = { - {0, 10}, -}; -static state states_43[11] = { - {1, arcs_43_0}, - {1, arcs_43_1}, - {1, arcs_43_2}, - {1, arcs_43_3}, - {1, arcs_43_4}, - {2, arcs_43_5}, - {1, arcs_43_6}, - {2, arcs_43_7}, - {1, arcs_43_8}, - {1, arcs_43_9}, - {1, arcs_43_10}, -}; -static const arc arcs_44_0[1] = { - {32, 1}, -}; -static const arc arcs_44_1[1] = { - {57, 2}, -}; -static const arc arcs_44_2[1] = { - {119, 3}, -}; -static const arc arcs_44_3[2] = { - {123, 4}, - {124, 5}, -}; -static const arc arcs_44_4[1] = { - {57, 6}, -}; -static const arc arcs_44_5[1] = { - {57, 7}, -}; -static const arc arcs_44_6[1] = { - {119, 8}, -}; -static const arc arcs_44_7[1] = { - {119, 9}, -}; -static const arc arcs_44_8[1] = { - {0, 8}, -}; -static const arc arcs_44_9[4] = { - {121, 10}, - {123, 4}, - {124, 5}, - {0, 9}, -}; -static const arc arcs_44_10[1] = { - {57, 11}, -}; -static const arc arcs_44_11[1] = { - {119, 12}, -}; -static const arc arcs_44_12[2] = { - {123, 4}, - {0, 12}, -}; -static state states_44[13] = { - {1, arcs_44_0}, - {1, arcs_44_1}, - {1, arcs_44_2}, - {2, arcs_44_3}, - {1, arcs_44_4}, - {1, arcs_44_5}, - {1, arcs_44_6}, - {1, arcs_44_7}, - {1, arcs_44_8}, - {4, arcs_44_9}, - {1, arcs_44_10}, - {1, arcs_44_11}, - {2, arcs_44_12}, -}; -static const arc arcs_45_0[1] = { - {34, 1}, -}; -static const arc arcs_45_1[1] = { - {125, 2}, -}; -static const arc arcs_45_2[2] = { - {65, 1}, - {57, 3}, -}; -static const arc arcs_45_3[2] = { - {59, 4}, - {119, 5}, -}; -static const arc arcs_45_4[1] = { - {119, 5}, -}; -static const arc arcs_45_5[1] = { - {0, 5}, -}; -static state states_45[6] = { - {1, arcs_45_0}, - {1, arcs_45_1}, - {2, arcs_45_2}, - {2, arcs_45_3}, - {1, arcs_45_4}, - {1, arcs_45_5}, -}; -static const arc arcs_46_0[1] = { - {58, 1}, -}; -static const arc arcs_46_1[2] = { - {111, 2}, - {0, 1}, -}; -static const arc arcs_46_2[1] = { - {126, 3}, -}; -static const arc arcs_46_3[1] = { - {0, 3}, -}; -static state states_46[4] = { - {1, arcs_46_0}, - {2, arcs_46_1}, - {1, arcs_46_2}, - 
{1, arcs_46_3}, -}; -static const arc arcs_47_0[1] = { - {127, 1}, -}; -static const arc arcs_47_1[2] = { - {58, 2}, - {0, 1}, -}; -static const arc arcs_47_2[2] = { - {111, 3}, - {0, 2}, -}; -static const arc arcs_47_3[1] = { - {40, 4}, -}; -static const arc arcs_47_4[1] = { - {0, 4}, -}; -static state states_47[5] = { - {1, arcs_47_0}, - {2, arcs_47_1}, - {2, arcs_47_2}, - {1, arcs_47_3}, - {1, arcs_47_4}, -}; -static const arc arcs_48_0[2] = { - {2, 1}, - {4, 2}, -}; -static const arc arcs_48_1[1] = { - {128, 3}, -}; -static const arc arcs_48_2[1] = { - {0, 2}, -}; -static const arc arcs_48_3[1] = { - {45, 4}, -}; -static const arc arcs_48_4[2] = { - {129, 2}, - {45, 4}, -}; -static state states_48[5] = { - {2, arcs_48_0}, - {1, arcs_48_1}, - {1, arcs_48_2}, - {1, arcs_48_3}, - {2, arcs_48_4}, -}; -static const arc arcs_49_0[1] = { - {58, 1}, -}; -static const arc arcs_49_1[2] = { - {130, 2}, - {0, 1}, -}; -static const arc arcs_49_2[1] = { - {58, 3}, -}; -static const arc arcs_49_3[1] = { - {0, 3}, -}; -static state states_49[4] = { - {1, arcs_49_0}, - {2, arcs_49_1}, - {1, arcs_49_2}, - {1, arcs_49_3}, -}; -static const arc arcs_50_0[2] = { - {131, 1}, - {132, 2}, -}; -static const arc arcs_50_1[1] = { - {0, 1}, -}; -static const arc arcs_50_2[2] = { - {24, 3}, - {0, 2}, -}; -static const arc arcs_50_3[1] = { - {132, 4}, -}; -static const arc arcs_50_4[1] = { - {121, 5}, -}; -static const arc arcs_50_5[1] = { - {58, 1}, -}; -static state states_50[6] = { - {2, arcs_50_0}, - {1, arcs_50_1}, - {2, arcs_50_2}, - {1, arcs_50_3}, - {1, arcs_50_4}, - {1, arcs_50_5}, -}; -static const arc arcs_51_0[2] = { - {134, 1}, - {132, 1}, -}; -static const arc arcs_51_1[1] = { - {0, 1}, -}; -static state states_51[2] = { - {2, arcs_51_0}, - {1, arcs_51_1}, -}; -static const arc arcs_52_0[1] = { - {26, 1}, -}; -static const arc arcs_52_1[2] = { - {57, 2}, - {68, 3}, -}; -static const arc arcs_52_2[1] = { - {58, 4}, -}; -static const arc arcs_52_3[1] = { - {57, 2}, -}; -static const arc arcs_52_4[1] = { - {0, 4}, -}; -static state states_52[5] = { - {1, arcs_52_0}, - {2, arcs_52_1}, - {1, arcs_52_2}, - {1, arcs_52_3}, - {1, arcs_52_4}, -}; -static const arc arcs_53_0[1] = { - {26, 1}, -}; -static const arc arcs_53_1[2] = { - {57, 2}, - {68, 3}, -}; -static const arc arcs_53_2[1] = { - {133, 4}, -}; -static const arc arcs_53_3[1] = { - {57, 2}, -}; -static const arc arcs_53_4[1] = { - {0, 4}, -}; -static state states_53[5] = { - {1, arcs_53_0}, - {2, arcs_53_1}, - {1, arcs_53_2}, - {1, arcs_53_3}, - {1, arcs_53_4}, -}; -static const arc arcs_54_0[1] = { - {135, 1}, -}; -static const arc arcs_54_1[2] = { - {136, 0}, - {0, 1}, -}; -static state states_54[2] = { - {1, arcs_54_0}, - {2, arcs_54_1}, -}; -static const arc arcs_55_0[1] = { - {137, 1}, -}; -static const arc arcs_55_1[2] = { - {138, 0}, - {0, 1}, -}; -static state states_55[2] = { - {1, arcs_55_0}, - {2, arcs_55_1}, -}; -static const arc arcs_56_0[2] = { - {28, 1}, - {139, 2}, -}; -static const arc arcs_56_1[1] = { - {137, 2}, -}; -static const arc arcs_56_2[1] = { - {0, 2}, -}; -static state states_56[3] = { - {2, arcs_56_0}, - {1, arcs_56_1}, - {1, arcs_56_2}, -}; -static const arc arcs_57_0[1] = { - {126, 1}, -}; -static const arc arcs_57_1[2] = { - {140, 0}, - {0, 1}, -}; -static state states_57[2] = { - {1, arcs_57_0}, - {2, arcs_57_1}, -}; -static const arc arcs_58_0[10] = { - {141, 1}, - {142, 1}, - {143, 1}, - {141, 1}, - {144, 1}, - {145, 1}, - {146, 1}, - {122, 1}, - {147, 2}, - {28, 3}, -}; -static const arc arcs_58_1[1] = { - {0, 1}, 
-}; -static const arc arcs_58_2[2] = { - {28, 1}, - {0, 2}, -}; -static const arc arcs_58_3[1] = { - {122, 1}, -}; -static state states_58[4] = { - {10, arcs_58_0}, - {1, arcs_58_1}, - {2, arcs_58_2}, - {1, arcs_58_3}, -}; -static const arc arcs_59_0[1] = { - {6, 1}, -}; -static const arc arcs_59_1[1] = { - {126, 2}, -}; -static const arc arcs_59_2[1] = { - {0, 2}, -}; -static state states_59[3] = { - {1, arcs_59_0}, - {1, arcs_59_1}, - {1, arcs_59_2}, -}; -static const arc arcs_60_0[1] = { - {148, 1}, -}; -static const arc arcs_60_1[2] = { - {149, 0}, - {0, 1}, -}; -static state states_60[2] = { - {1, arcs_60_0}, - {2, arcs_60_1}, -}; -static const arc arcs_61_0[1] = { - {150, 1}, -}; -static const arc arcs_61_1[2] = { - {151, 0}, - {0, 1}, -}; -static state states_61[2] = { - {1, arcs_61_0}, - {2, arcs_61_1}, -}; -static const arc arcs_62_0[1] = { - {152, 1}, -}; -static const arc arcs_62_1[2] = { - {153, 0}, - {0, 1}, -}; -static state states_62[2] = { - {1, arcs_62_0}, - {2, arcs_62_1}, -}; -static const arc arcs_63_0[1] = { - {154, 1}, -}; -static const arc arcs_63_1[3] = { - {155, 0}, - {156, 0}, - {0, 1}, -}; -static state states_63[2] = { - {1, arcs_63_0}, - {3, arcs_63_1}, -}; -static const arc arcs_64_0[1] = { - {157, 1}, -}; -static const arc arcs_64_1[3] = { - {7, 0}, - {8, 0}, - {0, 1}, -}; -static state states_64[2] = { - {1, arcs_64_0}, - {3, arcs_64_1}, -}; -static const arc arcs_65_0[1] = { - {158, 1}, -}; -static const arc arcs_65_1[6] = { - {159, 0}, - {6, 0}, - {67, 0}, - {160, 0}, - {10, 0}, - {0, 1}, -}; -static state states_65[2] = { - {1, arcs_65_0}, - {6, arcs_65_1}, -}; -static const arc arcs_66_0[4] = { - {7, 1}, - {8, 1}, - {37, 1}, - {161, 2}, -}; -static const arc arcs_66_1[1] = { - {158, 2}, -}; -static const arc arcs_66_2[1] = { - {0, 2}, -}; -static state states_66[3] = { - {4, arcs_66_0}, - {1, arcs_66_1}, - {1, arcs_66_2}, -}; -static const arc arcs_67_0[1] = { - {162, 1}, -}; -static const arc arcs_67_1[2] = { - {63, 2}, - {0, 1}, -}; -static const arc arcs_67_2[1] = { - {158, 3}, -}; -static const arc arcs_67_3[1] = { - {0, 3}, -}; -static state states_67[4] = { - {1, arcs_67_0}, - {2, arcs_67_1}, - {1, arcs_67_2}, - {1, arcs_67_3}, -}; -static const arc arcs_68_0[2] = { - {39, 1}, - {163, 2}, -}; -static const arc arcs_68_1[1] = { - {163, 2}, -}; -static const arc arcs_68_2[2] = { - {164, 2}, - {0, 2}, -}; -static state states_68[3] = { - {2, arcs_68_0}, - {1, arcs_68_1}, - {2, arcs_68_2}, -}; -static const arc arcs_69_0[10] = { - {5, 1}, - {9, 2}, - {11, 2}, - {12, 2}, - {13, 2}, - {14, 3}, - {36, 4}, - {40, 2}, - {41, 2}, - {42, 5}, -}; -static const arc arcs_69_1[3] = { - {61, 2}, - {165, 6}, - {83, 6}, -}; -static const arc arcs_69_2[1] = { - {0, 2}, -}; -static const arc arcs_69_3[2] = { - {166, 2}, - {165, 7}, -}; -static const arc arcs_69_4[2] = { - {167, 2}, - {168, 8}, -}; -static const arc arcs_69_5[2] = { - {42, 5}, - {0, 5}, -}; -static const arc arcs_69_6[1] = { - {61, 2}, -}; -static const arc arcs_69_7[1] = { - {166, 2}, -}; -static const arc arcs_69_8[1] = { - {167, 2}, -}; -static state states_69[9] = { - {10, arcs_69_0}, - {3, arcs_69_1}, - {1, arcs_69_2}, - {2, arcs_69_3}, - {2, arcs_69_4}, - {2, arcs_69_5}, - {1, arcs_69_6}, - {1, arcs_69_7}, - {1, arcs_69_8}, -}; -static const arc arcs_70_0[2] = { - {49, 1}, - {84, 1}, -}; -static const arc arcs_70_1[3] = { - {65, 2}, - {169, 3}, - {0, 1}, -}; -static const arc arcs_70_2[3] = { - {49, 4}, - {84, 4}, - {0, 2}, -}; -static const arc arcs_70_3[1] = { - {0, 3}, -}; -static const arc 
arcs_70_4[2] = { - {65, 2}, - {0, 4}, -}; -static state states_70[5] = { - {2, arcs_70_0}, - {3, arcs_70_1}, - {3, arcs_70_2}, - {1, arcs_70_3}, - {2, arcs_70_4}, -}; -static const arc arcs_71_0[3] = { - {5, 1}, - {107, 2}, - {14, 3}, -}; -static const arc arcs_71_1[2] = { - {61, 4}, - {170, 5}, -}; -static const arc arcs_71_2[1] = { - {40, 4}, -}; -static const arc arcs_71_3[1] = { - {171, 6}, -}; -static const arc arcs_71_4[1] = { - {0, 4}, -}; -static const arc arcs_71_5[1] = { - {61, 4}, -}; -static const arc arcs_71_6[1] = { - {166, 4}, -}; -static state states_71[7] = { - {3, arcs_71_0}, - {2, arcs_71_1}, - {1, arcs_71_2}, - {1, arcs_71_3}, - {1, arcs_71_4}, - {1, arcs_71_5}, - {1, arcs_71_6}, -}; -static const arc arcs_72_0[1] = { - {172, 1}, -}; -static const arc arcs_72_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_72_2[2] = { - {172, 1}, - {0, 2}, -}; -static state states_72[3] = { - {1, arcs_72_0}, - {2, arcs_72_1}, - {2, arcs_72_2}, -}; -static const arc arcs_73_0[2] = { - {57, 1}, - {58, 2}, -}; -static const arc arcs_73_1[3] = { - {173, 3}, - {58, 4}, - {0, 1}, -}; -static const arc arcs_73_2[2] = { - {57, 1}, - {0, 2}, -}; -static const arc arcs_73_3[1] = { - {0, 3}, -}; -static const arc arcs_73_4[2] = { - {173, 3}, - {0, 4}, -}; -static state states_73[5] = { - {2, arcs_73_0}, - {3, arcs_73_1}, - {2, arcs_73_2}, - {1, arcs_73_3}, - {2, arcs_73_4}, -}; -static const arc arcs_74_0[1] = { - {57, 1}, -}; -static const arc arcs_74_1[2] = { - {58, 2}, - {0, 1}, -}; -static const arc arcs_74_2[1] = { - {0, 2}, -}; -static state states_74[3] = { - {1, arcs_74_0}, - {2, arcs_74_1}, - {1, arcs_74_2}, -}; -static const arc arcs_75_0[2] = { - {126, 1}, - {84, 1}, -}; -static const arc arcs_75_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_75_2[3] = { - {126, 1}, - {84, 1}, - {0, 2}, -}; -static state states_75[3] = { - {2, arcs_75_0}, - {2, arcs_75_1}, - {3, arcs_75_2}, -}; -static const arc arcs_76_0[1] = { - {58, 1}, -}; -static const arc arcs_76_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_76_2[2] = { - {58, 1}, - {0, 2}, -}; -static state states_76[3] = { - {1, arcs_76_0}, - {2, arcs_76_1}, - {2, arcs_76_2}, -}; -static const arc arcs_77_0[3] = { - {63, 1}, - {84, 2}, - {58, 3}, -}; -static const arc arcs_77_1[1] = { - {126, 4}, -}; -static const arc arcs_77_2[3] = { - {65, 5}, - {169, 6}, - {0, 2}, -}; -static const arc arcs_77_3[4] = { - {65, 5}, - {57, 7}, - {169, 6}, - {0, 3}, -}; -static const arc arcs_77_4[3] = { - {65, 8}, - {169, 6}, - {0, 4}, -}; -static const arc arcs_77_5[3] = { - {84, 9}, - {58, 9}, - {0, 5}, -}; -static const arc arcs_77_6[1] = { - {0, 6}, -}; -static const arc arcs_77_7[1] = { - {58, 4}, -}; -static const arc arcs_77_8[3] = { - {63, 10}, - {58, 11}, - {0, 8}, -}; -static const arc arcs_77_9[2] = { - {65, 5}, - {0, 9}, -}; -static const arc arcs_77_10[1] = { - {126, 12}, -}; -static const arc arcs_77_11[1] = { - {57, 13}, -}; -static const arc arcs_77_12[2] = { - {65, 8}, - {0, 12}, -}; -static const arc arcs_77_13[1] = { - {58, 12}, -}; -static state states_77[14] = { - {3, arcs_77_0}, - {1, arcs_77_1}, - {3, arcs_77_2}, - {4, arcs_77_3}, - {3, arcs_77_4}, - {3, arcs_77_5}, - {1, arcs_77_6}, - {1, arcs_77_7}, - {3, arcs_77_8}, - {2, arcs_77_9}, - {1, arcs_77_10}, - {1, arcs_77_11}, - {2, arcs_77_12}, - {1, arcs_77_13}, -}; -static const arc arcs_78_0[1] = { - {17, 1}, -}; -static const arc arcs_78_1[1] = { - {40, 2}, -}; -static const arc arcs_78_2[2] = { - {5, 3}, - {57, 4}, -}; -static const arc arcs_78_3[2] = { - 
{61, 5}, - {170, 6}, -}; -static const arc arcs_78_4[1] = { - {119, 7}, -}; -static const arc arcs_78_5[1] = { - {57, 4}, -}; -static const arc arcs_78_6[1] = { - {61, 5}, -}; -static const arc arcs_78_7[1] = { - {0, 7}, -}; -static state states_78[8] = { - {1, arcs_78_0}, - {1, arcs_78_1}, - {2, arcs_78_2}, - {2, arcs_78_3}, - {1, arcs_78_4}, - {1, arcs_78_5}, - {1, arcs_78_6}, - {1, arcs_78_7}, -}; -static const arc arcs_79_0[1] = { - {174, 1}, -}; -static const arc arcs_79_1[2] = { - {65, 2}, - {0, 1}, -}; -static const arc arcs_79_2[2] = { - {174, 1}, - {0, 2}, -}; -static state states_79[3] = { - {1, arcs_79_0}, - {2, arcs_79_1}, - {2, arcs_79_2}, -}; -static const arc arcs_80_0[3] = { - {6, 1}, - {63, 1}, - {58, 2}, -}; -static const arc arcs_80_1[1] = { - {58, 3}, -}; -static const arc arcs_80_2[4] = { - {130, 1}, - {66, 1}, - {169, 3}, - {0, 2}, -}; -static const arc arcs_80_3[1] = { - {0, 3}, -}; -static state states_80[4] = { - {3, arcs_80_0}, - {1, arcs_80_1}, - {4, arcs_80_2}, - {1, arcs_80_3}, -}; -static const arc arcs_81_0[2] = { - {169, 1}, - {176, 1}, -}; -static const arc arcs_81_1[1] = { - {0, 1}, -}; -static state states_81[2] = { - {2, arcs_81_0}, - {1, arcs_81_1}, -}; -static const arc arcs_82_0[1] = { - {21, 1}, -}; -static const arc arcs_82_1[1] = { - {98, 2}, -}; -static const arc arcs_82_2[1] = { - {122, 3}, -}; -static const arc arcs_82_3[1] = { - {132, 4}, -}; -static const arc arcs_82_4[2] = { - {175, 5}, - {0, 4}, -}; -static const arc arcs_82_5[1] = { - {0, 5}, -}; -static state states_82[6] = { - {1, arcs_82_0}, - {1, arcs_82_1}, - {1, arcs_82_2}, - {1, arcs_82_3}, - {2, arcs_82_4}, - {1, arcs_82_5}, -}; -static const arc arcs_83_0[2] = { - {38, 1}, - {177, 2}, -}; -static const arc arcs_83_1[1] = { - {177, 2}, -}; -static const arc arcs_83_2[1] = { - {0, 2}, -}; -static state states_83[3] = { - {2, arcs_83_0}, - {1, arcs_83_1}, - {1, arcs_83_2}, -}; -static const arc arcs_84_0[1] = { - {24, 1}, -}; -static const arc arcs_84_1[1] = { - {133, 2}, -}; -static const arc arcs_84_2[2] = { - {175, 3}, - {0, 2}, -}; -static const arc arcs_84_3[1] = { - {0, 3}, -}; -static state states_84[4] = { - {1, arcs_84_0}, - {1, arcs_84_1}, - {2, arcs_84_2}, - {1, arcs_84_3}, -}; -static const arc arcs_85_0[1] = { - {40, 1}, -}; -static const arc arcs_85_1[1] = { - {0, 1}, -}; -static state states_85[2] = { - {1, arcs_85_0}, - {1, arcs_85_1}, -}; -static const arc arcs_86_0[1] = { - {35, 1}, -}; -static const arc arcs_86_1[2] = { - {179, 2}, - {0, 1}, -}; -static const arc arcs_86_2[1] = { - {0, 2}, -}; -static state states_86[3] = { - {1, arcs_86_0}, - {2, arcs_86_1}, - {1, arcs_86_2}, -}; -static const arc arcs_87_0[2] = { - {22, 1}, - {80, 2}, -}; -static const arc arcs_87_1[1] = { - {58, 2}, -}; -static const arc arcs_87_2[1] = { - {0, 2}, -}; -static state states_87[3] = { - {2, arcs_87_0}, - {1, arcs_87_1}, - {1, arcs_87_2}, -}; -static const arc arcs_88_0[2] = { - {2, 1}, - {4, 2}, -}; -static const arc arcs_88_1[2] = { - {128, 3}, - {59, 4}, -}; -static const arc arcs_88_2[1] = { - {0, 2}, -}; -static const arc arcs_88_3[1] = { - {45, 5}, -}; -static const arc arcs_88_4[1] = { - {2, 6}, -}; -static const arc arcs_88_5[2] = { - {129, 2}, - {45, 5}, -}; -static const arc arcs_88_6[1] = { - {128, 3}, -}; -static state states_88[7] = { - {2, arcs_88_0}, - {2, arcs_88_1}, - {1, arcs_88_2}, - {1, arcs_88_3}, - {1, arcs_88_4}, - {2, arcs_88_5}, - {1, arcs_88_6}, -}; -static const arc arcs_89_0[1] = { - {181, 1}, -}; -static const arc arcs_89_1[2] = { - {44, 2}, - {2, 1}, -}; 
-static const arc arcs_89_2[1] = { - {0, 2}, -}; -static state states_89[3] = { - {1, arcs_89_0}, - {2, arcs_89_1}, - {1, arcs_89_2}, -}; -static const arc arcs_90_0[1] = { - {5, 1}, -}; -static const arc arcs_90_1[2] = { - {61, 2}, - {182, 3}, -}; -static const arc arcs_90_2[1] = { - {56, 4}, -}; -static const arc arcs_90_3[1] = { - {61, 2}, -}; -static const arc arcs_90_4[1] = { - {58, 5}, -}; -static const arc arcs_90_5[1] = { - {0, 5}, -}; -static state states_90[6] = { - {1, arcs_90_0}, - {2, arcs_90_1}, - {1, arcs_90_2}, - {1, arcs_90_3}, - {1, arcs_90_4}, - {1, arcs_90_5}, -}; -static const arc arcs_91_0[3] = { - {6, 1}, - {63, 2}, - {58, 3}, -}; -static const arc arcs_91_1[3] = { - {65, 4}, - {58, 5}, - {0, 1}, -}; -static const arc arcs_91_2[1] = { - {58, 6}, -}; -static const arc arcs_91_3[2] = { - {65, 7}, - {0, 3}, -}; -static const arc arcs_91_4[2] = { - {63, 2}, - {58, 5}, -}; -static const arc arcs_91_5[2] = { - {65, 4}, - {0, 5}, -}; -static const arc arcs_91_6[1] = { - {0, 6}, -}; -static const arc arcs_91_7[4] = { - {6, 8}, - {63, 2}, - {58, 3}, - {0, 7}, -}; -static const arc arcs_91_8[3] = { - {65, 9}, - {58, 10}, - {0, 8}, -}; -static const arc arcs_91_9[2] = { - {63, 2}, - {58, 10}, -}; -static const arc arcs_91_10[2] = { - {65, 9}, - {0, 10}, -}; -static state states_91[11] = { - {3, arcs_91_0}, - {3, arcs_91_1}, - {1, arcs_91_2}, - {2, arcs_91_3}, - {2, arcs_91_4}, - {2, arcs_91_5}, - {1, arcs_91_6}, - {4, arcs_91_7}, - {3, arcs_91_8}, - {2, arcs_91_9}, - {2, arcs_91_10}, -}; -static const dfa dfas[92] = { - {256, "single_input", 3, states_0, - "\344\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {257, "file_input", 2, states_1, - "\344\377\377\377\377\027\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {258, "eval_input", 3, states_2, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {259, "decorator", 4, states_3, - "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {260, "decorators", 2, states_4, - "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {261, "decorated", 3, states_5, - "\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {262, "async_funcdef", 3, states_6, - "\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {263, "funcdef", 9, states_7, - "\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {264, "parameters", 4, states_8, - "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {265, "typedargslist", 42, states_9, - "\100\000\000\000\000\001\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {266, "tfpdef", 4, states_10, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {267, "varargslist", 34, states_11, - "\100\000\000\000\000\001\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {268, "vfpdef", 2, states_12, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {269, "stmt", 2, states_13, - "\340\377\377\377\377\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {270, "simple_stmt", 4, states_14, - 
"\340\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {271, "small_stmt", 2, states_15, - "\340\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {272, "expr_stmt", 6, states_16, - "\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {273, "annassign", 5, states_17, - "\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {274, "testlist_star_expr", 3, states_18, - "\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {275, "augassign", 2, states_19, - "\000\000\000\000\000\000\000\000\000\000\340\377\003\000\000\000\000\000\000\000\000\000\000"}, - {276, "del_stmt", 3, states_20, - "\000\000\020\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {277, "pass_stmt", 2, states_21, - "\000\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {278, "flow_stmt", 2, states_22, - "\000\000\005\300\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {279, "break_stmt", 2, states_23, - "\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {280, "continue_stmt", 2, states_24, - "\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {281, "return_stmt", 3, states_25, - "\000\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {282, "yield_stmt", 2, states_26, - "\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {283, "raise_stmt", 5, states_27, - "\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {284, "import_stmt", 2, states_28, - "\000\000\100\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {285, "import_name", 3, states_29, - "\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {286, "import_from", 8, states_30, - "\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {287, "import_as_name", 4, states_31, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {288, "dotted_as_name", 4, states_32, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {289, "import_as_names", 3, states_33, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {290, "dotted_as_names", 2, states_34, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {291, "dotted_name", 2, states_35, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {292, "global_stmt", 3, states_36, - "\000\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {293, "nonlocal_stmt", 3, states_37, - "\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {294, "assert_stmt", 5, states_38, - "\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {295, "compound_stmt", 2, states_39, - "\000\004\052\001\107\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {296, "async_stmt", 3, states_40, - 
"\000\000\000\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {297, "if_stmt", 8, states_41, - "\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {298, "while_stmt", 8, states_42, - "\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {299, "for_stmt", 11, states_43, - "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {300, "try_stmt", 13, states_44, - "\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {301, "with_stmt", 6, states_45, - "\000\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {302, "with_item", 4, states_46, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {303, "except_clause", 5, states_47, - "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\200\000\000\000\000\000\000\000"}, - {304, "suite", 5, states_48, - "\344\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {305, "namedexpr_test", 4, states_49, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {306, "test", 6, states_50, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {307, "test_nocond", 2, states_51, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {308, "lambdef", 5, states_52, - "\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {309, "lambdef_nocond", 5, states_53, - "\000\000\000\004\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {310, "or_test", 2, states_54, - "\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {311, "and_test", 2, states_55, - "\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {312, "not_test", 3, states_56, - "\240\173\000\020\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {313, "comparison", 2, states_57, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {314, "comp_op", 4, states_58, - "\000\000\000\020\000\000\000\000\000\000\000\000\000\000\000\004\000\340\017\000\000\000\000"}, - {315, "star_expr", 3, states_59, - "\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {316, "expr", 2, states_60, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {317, "xor_expr", 2, states_61, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {318, "and_expr", 2, states_62, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {319, "shift_expr", 2, states_63, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {320, "arith_expr", 2, states_64, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {321, "term", 2, states_65, - "\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {322, "factor", 3, states_66, - 
"\240\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {323, "power", 4, states_67, - "\040\172\000\000\220\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {324, "atom_expr", 3, states_68, - "\040\172\000\000\220\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {325, "atom", 9, states_69, - "\040\172\000\000\020\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {326, "testlist_comp", 5, states_70, - "\340\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {327, "trailer", 7, states_71, - "\040\100\000\000\000\000\000\000\000\000\000\000\000\010\000\000\000\000\000\000\000\000\000"}, - {328, "subscriptlist", 3, states_72, - "\240\173\000\024\260\007\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {329, "subscript", 5, states_73, - "\240\173\000\024\260\007\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {330, "sliceop", 3, states_74, - "\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {331, "exprlist", 3, states_75, - "\340\173\000\000\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {332, "testlist", 3, states_76, - "\240\173\000\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {333, "dictorsetmaker", 14, states_77, - "\340\173\000\024\260\007\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {334, "classdef", 8, states_78, - "\000\000\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {335, "arglist", 3, states_79, - "\340\173\000\024\260\007\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {336, "argument", 4, states_80, - "\340\173\000\024\260\007\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {337, "comp_iter", 2, states_81, - "\000\000\040\001\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {338, "sync_comp_for", 6, states_82, - "\000\000\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {339, "comp_for", 3, states_83, - "\000\000\040\000\100\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {340, "comp_if", 4, states_84, - "\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {341, "encoding_decl", 2, states_85, - "\000\000\000\000\000\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {342, "yield_expr", 3, states_86, - "\000\000\000\000\010\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {343, "yield_arg", 3, states_87, - "\340\173\100\024\260\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {344, "func_body_suite", 7, states_88, - "\344\373\325\376\270\007\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {345, "func_type_input", 3, states_89, - "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {346, "func_type", 6, states_90, - "\040\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, - {347, "typelist", 11, states_91, - "\340\173\000\024\260\007\000\200\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000"}, -}; -static const label labels[183] = { - {0, "EMPTY"}, - {256, 0}, - {4, 0}, - {295, 0}, - 
{270, 0}, - {7, 0}, - {16, 0}, - {14, 0}, - {15, 0}, - {52, 0}, - {49, 0}, - {1, "False"}, - {1, "None"}, - {1, "True"}, - {9, 0}, - {1, "assert"}, - {1, "break"}, - {1, "class"}, - {1, "continue"}, - {1, "def"}, - {1, "del"}, - {1, "for"}, - {1, "from"}, - {1, "global"}, - {1, "if"}, - {1, "import"}, - {1, "lambda"}, - {1, "nonlocal"}, - {1, "not"}, - {1, "pass"}, - {1, "raise"}, - {1, "return"}, - {1, "try"}, - {1, "while"}, - {1, "with"}, - {1, "yield"}, - {25, 0}, - {31, 0}, - {56, 0}, - {55, 0}, - {1, 0}, - {2, 0}, - {3, 0}, - {257, 0}, - {0, 0}, - {269, 0}, - {258, 0}, - {332, 0}, - {259, 0}, - {305, 0}, - {260, 0}, - {261, 0}, - {262, 0}, - {334, 0}, - {263, 0}, - {264, 0}, - {51, 0}, - {11, 0}, - {306, 0}, - {58, 0}, - {344, 0}, - {8, 0}, - {265, 0}, - {35, 0}, - {266, 0}, - {12, 0}, - {22, 0}, - {17, 0}, - {267, 0}, - {268, 0}, - {271, 0}, - {13, 0}, - {294, 0}, - {276, 0}, - {272, 0}, - {278, 0}, - {292, 0}, - {284, 0}, - {293, 0}, - {277, 0}, - {274, 0}, - {273, 0}, - {275, 0}, - {342, 0}, - {315, 0}, - {40, 0}, - {41, 0}, - {46, 0}, - {38, 0}, - {36, 0}, - {37, 0}, - {48, 0}, - {39, 0}, - {44, 0}, - {45, 0}, - {50, 0}, - {43, 0}, - {42, 0}, - {331, 0}, - {279, 0}, - {280, 0}, - {283, 0}, - {281, 0}, - {282, 0}, - {286, 0}, - {285, 0}, - {290, 0}, - {23, 0}, - {291, 0}, - {289, 0}, - {287, 0}, - {1, "as"}, - {288, 0}, - {296, 0}, - {299, 0}, - {297, 0}, - {300, 0}, - {298, 0}, - {301, 0}, - {304, 0}, - {1, "elif"}, - {1, "else"}, - {1, "in"}, - {1, "finally"}, - {303, 0}, - {302, 0}, - {316, 0}, - {1, "except"}, - {5, 0}, - {6, 0}, - {53, 0}, - {308, 0}, - {310, 0}, - {307, 0}, - {309, 0}, - {311, 0}, - {1, "or"}, - {312, 0}, - {1, "and"}, - {313, 0}, - {314, 0}, - {28, 0}, - {20, 0}, - {29, 0}, - {27, 0}, - {21, 0}, - {30, 0}, - {1, "is"}, - {317, 0}, - {18, 0}, - {318, 0}, - {32, 0}, - {319, 0}, - {19, 0}, - {320, 0}, - {33, 0}, - {34, 0}, - {321, 0}, - {322, 0}, - {24, 0}, - {47, 0}, - {323, 0}, - {324, 0}, - {325, 0}, - {327, 0}, - {326, 0}, - {10, 0}, - {26, 0}, - {333, 0}, - {339, 0}, - {335, 0}, - {328, 0}, - {329, 0}, - {330, 0}, - {336, 0}, - {337, 0}, - {340, 0}, - {338, 0}, - {341, 0}, - {343, 0}, - {345, 0}, - {346, 0}, - {347, 0}, -}; -Py_EXPORTED_SYMBOL grammar _PyParser_Grammar = { - 92, - dfas, - {183, labels}, - 256 -}; diff --git a/Python/peephole.c b/Python/peephole.c index 84de1abc17547..fe67de42227b5 100644 --- a/Python/peephole.c +++ b/Python/peephole.c @@ -3,7 +3,6 @@ #include "Python.h" #include "Python-ast.h" -#include "node.h" #include "ast.h" #include "code.h" #include "symtable.h" diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 7a3b5b52ac417..04fad04227df9 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -20,13 +20,10 @@ #include "pycore_pystate.h" // _PyInterpreterState_GET() #include "pycore_sysmodule.h" // _PySys_Audit() -#include "node.h" // node #include "token.h" // INDENT -#include "parsetok.h" // perrdetail #include "errcode.h" // E_EOF #include "code.h" // PyCodeObject #include "symtable.h" // PySymtable_BuildObject() -#include "ast.h" // PyAST_FromNodeObject() #include "marshal.h" // PyMarshal_ReadLongFromFile() #include "pegen_interface.h" // PyPegen_ASTFrom* diff --git a/Tools/scripts/generate_symbol_py.py b/Tools/scripts/generate_symbol_py.py deleted file mode 100755 index 9219b096e4d67..0000000000000 --- a/Tools/scripts/generate_symbol_py.py +++ /dev/null @@ -1,53 +0,0 @@ -#! /usr/bin/env python3 -# This script generates the symbol.py source file. 
- -import sys -import re - -def main(inFileName="Include/graminit.h", outFileName="Lib/symbol.py"): - try: - fp = open(inFileName) - except OSError as err: - sys.stderr.write("I/O error: %s\n" % str(err)) - sys.exit(1) - with fp: - lines = fp.read().split("\n") - prog = re.compile( - "#define[ \t][ \t]*([A-Z0-9][A-Z0-9_]*)[ \t][ \t]*([0-9][0-9]*)", - re.IGNORECASE) - tokens = {} - for line in lines: - match = prog.match(line) - if match: - name, val = match.group(1, 2) - val = int(val) - tokens[val] = name # reverse so we can sort them... - keys = sorted(tokens.keys()) - # load the output skeleton from the target: - try: - fp = open(outFileName) - except OSError as err: - sys.stderr.write("I/O error: %s\n" % str(err)) - sys.exit(2) - with fp: - format = fp.read().split("\n") - try: - start = format.index("#--start constants--") + 1 - end = format.index("#--end constants--") - except ValueError: - sys.stderr.write("target does not contain format markers") - sys.exit(3) - lines = [] - for val in keys: - lines.append("%s = %d" % (tokens[val], val)) - format[start:end] = lines - try: - fp = open(outFileName, 'w') - except OSError as err: - sys.stderr.write("I/O error: %s\n" % str(err)) - sys.exit(4) - with fp: - fp.write("\n".join(format)) - -if __name__ == '__main__': - main(*sys.argv[1:]) diff --git a/configure b/configure index 139c2bf7de132..dc590c2e98273 100755 --- a/configure +++ b/configure @@ -2739,7 +2739,7 @@ if test "$srcdir" != . -a "$srcdir" != "$(pwd)"; then # If we're building out-of-tree, we need to make sure the following # resources get picked up before their $srcdir counterparts. # Objects/ -> typeslots.inc - # Include/ -> Python-ast.h, graminit.h + # Include/ -> Python-ast.h # Python/ -> importlib.h # (A side effect of this is that these resources will automatically be # regenerated when building out-of-tree, regardless of whether or not diff --git a/configure.ac b/configure.ac index 3001cc87f951f..70deefb6b9aea 100644 --- a/configure.ac +++ b/configure.ac @@ -16,7 +16,7 @@ if test "$srcdir" != . -a "$srcdir" != "$(pwd)"; then # If we're building out-of-tree, we need to make sure the following # resources get picked up before their $srcdir counterparts. # Objects/ -> typeslots.inc - # Include/ -> Python-ast.h, graminit.h + # Include/ -> Python-ast.h # Python/ -> importlib.h # (A side effect of this is that these resources will automatically be # regenerated when building out-of-tree, regardless of whether or not From webhook-mailer at python.org Sat Jun 20 15:15:08 2020 From: webhook-mailer at python.org (Gregory P. Smith) Date: Sat, 20 Jun 2020 19:15:08 -0000 Subject: [Python-checkins] bpo-41056: Fix a NULL pointer dereference on MemoryError within the ssl module. (GH-21009) Message-ID: https://github.com/python/cpython/commit/eb0d5c38de7f970d8cd8524f4163d831c7720f51 commit: eb0d5c38de7f970d8cd8524f4163d831c7720f51 branch: master author: Gregory P. Smith committer: GitHub date: 2020-06-20T12:15:03-07:00 summary: bpo-41056: Fix a NULL pointer dereference on MemoryError within the ssl module. (GH-21009) Detected by Coverity. 
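For context on how the patched callback is reached: _PySSL_keylog_callback only fires once key logging has been enabled on an SSLContext from Python. A minimal, illustrative sketch (assumes an OpenSSL build with keylog support; the log path is only a placeholder, not part of this change):

    import ssl

    ctx = ssl.create_default_context()
    # Setting keylog_filename opens the context's keylog BIO; TLS handshakes
    # on sockets wrapped with this context then invoke the C-level keylog
    # callback patched below for each logged line.
    ctx.keylog_filename = "/tmp/keylog.txt"

The change itself simply moves the keylog_bio NULL check ahead of the lock allocation, as shown in the diff below.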
files: A Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst M Modules/_ssl/debughelpers.c diff --git a/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst b/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst new file mode 100644 index 0000000000000..1776f0d1cf8a3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst @@ -0,0 +1 @@ +Fix a NULL pointer dereference within the ssl module during a MemoryError in the keylog callback. (discovered by Coverity) \ No newline at end of file diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c index 858b3d7955c9c..b840da2f663af 100644 --- a/Modules/_ssl/debughelpers.c +++ b/Modules/_ssl/debughelpers.c @@ -125,6 +125,12 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) threadstate = PyGILState_Ensure(); + ssl_obj = (PySSLSocket *)SSL_get_app_data(ssl); + assert(PySSLSocket_Check(ssl_obj)); + if (ssl_obj->ctx->keylog_bio == NULL) { + return; + } + /* Allocate a static lock to synchronize writes to keylog file. * The lock is neither released on exit nor on fork(). The lock is * also shared between all SSLContexts although contexts may write to @@ -141,12 +147,6 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) } } - ssl_obj = (PySSLSocket *)SSL_get_app_data(ssl); - assert(PySSLSocket_Check(ssl_obj)); - if (ssl_obj->ctx->keylog_bio == NULL) { - return; - } - PySSL_BEGIN_ALLOW_THREADS PyThread_acquire_lock(lock, 1); res = BIO_printf(ssl_obj->ctx->keylog_bio, "%s\n", line); From webhook-mailer at python.org Sat Jun 20 17:12:38 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 20 Jun 2020 21:12:38 -0000 Subject: [Python-checkins] Add link to .pypirc specification (GH-20680) (#21019) Message-ID: https://github.com/python/cpython/commit/a4c09560eaa1e3b9733c7e63d570b6fa7626724c commit: a4c09560eaa1e3b9733c7e63d570b6fa7626724c branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-20T17:12:27-04:00 summary: Add link to .pypirc specification (GH-20680) (#21019) Related to https://github.com/pypa/twine/issues/638 and https://github.com/pypa/packaging.python.org/issues/730, I wrote a spec based on the one that was removed in https://github.com/python/cpython/pull/13087. However, a Google search for "pypirc" turned up at least one [blog post](https://truveris.github.io/articles/configuring-pypirc/) that links to https://docs.python.org/3/distutils/packageindex.htmlGH-the-pypirc-file, which now just links to this document. So, I thought a link to the spec would be handy. Automerge-Triggered-By: @jaraco (cherry picked from commit af157fad286c00ff204e86d8556648cbb53ba99e) Co-authored-by: Brian Rutledge Co-authored-by: Brian Rutledge files: M Doc/distributing/index.rst diff --git a/Doc/distributing/index.rst b/Doc/distributing/index.rst index 5f7b3bbc4f917..02379946244d8 100644 --- a/Doc/distributing/index.rst +++ b/Doc/distributing/index.rst @@ -128,6 +128,7 @@ involved in creating and publishing a project: * `Project structure`_ * `Building and packaging the project`_ * `Uploading the project to the Python Packaging Index`_ +* `The .pypirc file`_ .. _Project structure: \ https://packaging.python.org/tutorials/distributing-packages/ @@ -135,6 +136,8 @@ involved in creating and publishing a project: https://packaging.python.org/tutorials/distributing-packages/#packaging-your-project .. 
_Uploading the project to the Python Packaging Index: \ https://packaging.python.org/tutorials/distributing-packages/#uploading-your-project-to-pypi +.. _The .pypirc file: \ + https://packaging.python.org/specifications/pypirc/ How do I...? From webhook-mailer at python.org Sat Jun 20 18:07:04 2020 From: webhook-mailer at python.org (Gregory P. Smith) Date: Sat, 20 Jun 2020 22:07:04 -0000 Subject: [Python-checkins] bpo-41056: Use the fildes converter for fd to please Coverity. (GH-21011) Message-ID: https://github.com/python/cpython/commit/3ccb96c9782480e5ce646a4a130569fb92f2965d commit: 3ccb96c9782480e5ce646a4a130569fb92f2965d branch: master author: Gregory P. Smith committer: GitHub date: 2020-06-20T15:06:48-07:00 summary: bpo-41056: Use the fildes converter for fd to please Coverity. (GH-21011) There are a bunch of other fd: int uses in this file, I expect many if not all of them would be better off using the fildes converter. This particular one was flagged by Coverity as it presumably flags fpathconf as not accepting negative fds. I'd expect the other fd's to have been flagged as well otherwise. I'm marking this one as skip news as it really is a no-op. files: A Misc/NEWS.d/next/Library/2020-06-20-18-37-29.bpo-41056.d9v_uL.rst M Modules/clinic/posixmodule.c.h M Modules/posixmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-06-20-18-37-29.bpo-41056.d9v_uL.rst b/Misc/NEWS.d/next/Library/2020-06-20-18-37-29.bpo-41056.d9v_uL.rst new file mode 100644 index 0000000000000..ddcc1102d5ed7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-18-37-29.bpo-41056.d9v_uL.rst @@ -0,0 +1 @@ +Invalid file descriptor values are now prevented from being passed to os.fpathconf. (discovered by Coverity) \ No newline at end of file diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index ff439ee47c393..b691cfbc6edef 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -6803,8 +6803,7 @@ os_fpathconf(PyObject *module, PyObject *const *args, Py_ssize_t nargs) if (!_PyArg_CheckPositional("fpathconf", nargs, 2, 2)) { goto exit; } - fd = _PyLong_AsInt(args[0]); - if (fd == -1 && PyErr_Occurred()) { + if (!fildes_converter(args[0], &fd)) { goto exit; } if (!conv_path_confname(args[1], &name)) { @@ -8877,4 +8876,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=767780ea3beacf34 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d7c1212a94613496 input=a9049054013a1b77]*/ diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c index 79779bfdeafd3..a411f28987ee7 100644 --- a/Modules/posixmodule.c +++ b/Modules/posixmodule.c @@ -10988,7 +10988,7 @@ conv_path_confname(PyObject *arg, int *valuep) /*[clinic input] os.fpathconf -> long - fd: int + fd: fildes name: path_confname / @@ -10999,7 +10999,7 @@ If there is no limit, return -1. 
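A rough sketch of the user-visible effect on a POSIX build (illustrative only, not part of the patch):

    import os
    import tempfile

    with tempfile.TemporaryFile() as f:
        # A real descriptor behaves as before.
        print(os.fpathconf(f.fileno(), "PC_NAME_MAX"))

    # With the fildes converter, an obviously invalid (negative) descriptor
    # is rejected up front with ValueError instead of being handed to the
    # underlying fpathconf(3) call.
    os.fpathconf(-1, "PC_NAME_MAX")

(The fildes converter goes through PyObject_AsFileDescriptor(), which is where the negative-value check lives.)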
static long os_fpathconf_impl(PyObject *module, int fd, int name) -/*[clinic end generated code: output=d5b7042425fc3e21 input=5942a024d3777810]*/ +/*[clinic end generated code: output=d5b7042425fc3e21 input=5b8d2471cfaae186]*/ { long limit; From webhook-mailer at python.org Sat Jun 20 22:18:08 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sun, 21 Jun 2020 02:18:08 -0000 Subject: [Python-checkins] bpo-41060: Avoid SEGFAULT when calling GET_INVALID_TARGET in the grammar (GH-21020) Message-ID: https://github.com/python/cpython/commit/6c4e0bd974f2895d42b63d9d004587e74b286c88 commit: 6c4e0bd974f2895d42b63d9d004587e74b286c88 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-21T03:18:01+01:00 summary: bpo-41060: Avoid SEGFAULT when calling GET_INVALID_TARGET in the grammar (GH-21020) `GET_INVALID_TARGET` might unexpectedly return `NULL`, which if not caught will cause a SEGFAULT. Therefore, this commit introduces a new inline function `RAISE_SYNTAX_ERROR_INVALID_TARGET` that always checks for `GET_INVALID_TARGET` returning NULL and can be used in the grammar, replacing the long C ternary operation used till now. files: M Grammar/python.gram M Lib/test/test_syntax.py M Parser/parser.c M Parser/pegen.h diff --git a/Grammar/python.gram b/Grammar/python.gram index e4abca9388eb0..c5a5dbe1724f3 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -653,9 +653,7 @@ invalid_assignment: | a=expression ':' expression ['=' annotated_rhs] { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } | (star_targets '=')* a=star_expressions '=' { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - GET_INVALID_TARGET(a), - "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_TARGET(a))) } + RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } | (star_targets '=')* a=yield_expr '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "assignment to yield expression not possible") } | a=star_expressions augassign (yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( @@ -665,12 +663,7 @@ invalid_assignment: )} invalid_del_stmt: | 'del' a=star_expressions { - GET_INVALID_DEL_TARGET(a) != NULL ? - RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - GET_INVALID_DEL_TARGET(a), - "cannot delete %s", _PyPegen_get_expr_name(GET_INVALID_DEL_TARGET(a)) - ) : - RAISE_SYNTAX_ERROR("invalid syntax") } + RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) } invalid_block: | NEWLINE !INDENT { RAISE_INDENTATION_ERROR("expected an indented block") } invalid_comprehension: @@ -695,19 +688,11 @@ invalid_double_type_comments: RAISE_SYNTAX_ERROR("Cannot have two type comments on def") } invalid_with_item: | expression 'as' a=expression { - RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - GET_INVALID_TARGET(a), - "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_TARGET(a)) - ) } + RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } invalid_for_target: | ASYNC? 'for' a=star_expressions { - GET_INVALID_FOR_TARGET(a) != NULL ? 
- RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - GET_INVALID_FOR_TARGET(a), - "cannot assign to %s", _PyPegen_get_expr_name(GET_INVALID_FOR_TARGET(a)) - ) : - RAISE_SYNTAX_ERROR("invalid syntax") } + RAISE_SYNTAX_ERROR_INVALID_TARGET(FOR_TARGETS, a) } invalid_group: | '(' a=starred_expression ')' { diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 9bb3d9ee44448..812a7df3228bc 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -199,6 +199,10 @@ Traceback (most recent call last): SyntaxError: invalid syntax +>>> for a, b +Traceback (most recent call last): +SyntaxError: invalid syntax + >>> with a as b(): pass Traceback (most recent call last): SyntaxError: cannot assign to function call @@ -223,6 +227,10 @@ Traceback (most recent call last): SyntaxError: cannot assign to function call +>>> with a as b +Traceback (most recent call last): +SyntaxError: invalid syntax + >>> p = p = Traceback (most recent call last): SyntaxError: invalid syntax diff --git a/Parser/parser.c b/Parser/parser.c index 1531c99f83891..323cd0e0efae3 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -14818,7 +14818,7 @@ invalid_assignment_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_TARGET ( a ) ) ); + _res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( STAR_TARGETS , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -14922,7 +14922,7 @@ invalid_del_stmt_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_del_stmt[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'del' star_expressions")); - _res = GET_INVALID_DEL_TARGET ( a ) != NULL ? RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_DEL_TARGET ( a ) , "cannot delete %s" , _PyPegen_get_expr_name ( GET_INVALID_DEL_TARGET ( a ) ) ) : RAISE_SYNTAX_ERROR ( "invalid syntax" ); + _res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( DEL_TARGETS , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -15379,7 +15379,7 @@ invalid_with_item_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_with_item[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression 'as' expression")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_TARGET ( a ) ) ); + _res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( STAR_TARGETS , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -15427,7 +15427,7 @@ invalid_for_target_rule(Parser *p) ) { D(fprintf(stderr, "%*c+ invalid_for_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "ASYNC? 'for' star_expressions")); - _res = GET_INVALID_FOR_TARGET ( a ) != NULL ? 
RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( GET_INVALID_FOR_TARGET ( a ) , "cannot assign to %s" , _PyPegen_get_expr_name ( GET_INVALID_FOR_TARGET ( a ) ) ) : RAISE_SYNTAX_ERROR ( "invalid syntax" ); + _res = RAISE_SYNTAX_ERROR_INVALID_TARGET ( FOR_TARGETS , a ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); diff --git a/Parser/pegen.h b/Parser/pegen.h index ef095dda49fd7..f407709863c69 100644 --- a/Parser/pegen.h +++ b/Parser/pegen.h @@ -269,9 +269,28 @@ typedef enum { FOR_TARGETS } TARGETS_TYPE; expr_ty _PyPegen_get_invalid_target(expr_ty e, TARGETS_TYPE targets_type); -#define GET_INVALID_TARGET(e) (expr_ty)CHECK(_PyPegen_get_invalid_target(e, STAR_TARGETS)) -#define GET_INVALID_DEL_TARGET(e) (expr_ty)CHECK_NULL_ALLOWED(_PyPegen_get_invalid_target(e, DEL_TARGETS)) -#define GET_INVALID_FOR_TARGET(e) (expr_ty)CHECK_NULL_ALLOWED(_PyPegen_get_invalid_target(e, FOR_TARGETS)) +#define RAISE_SYNTAX_ERROR_INVALID_TARGET(type, e) _RAISE_SYNTAX_ERROR_INVALID_TARGET(p, type, e) + +Py_LOCAL_INLINE(void *) +_RAISE_SYNTAX_ERROR_INVALID_TARGET(Parser *p, TARGETS_TYPE type, void *e) +{ + expr_ty invalid_target = CHECK_NULL_ALLOWED(_PyPegen_get_invalid_target(e, type)); + if (invalid_target != NULL) { + const char *msg; + if (type == STAR_TARGETS || type == FOR_TARGETS) { + msg = "cannot assign to %s"; + } + else { + msg = "cannot delete %s"; + } + return RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + invalid_target, + msg, + _PyPegen_get_expr_name(invalid_target) + ); + } + return RAISE_SYNTAX_ERROR("invalid syntax"); +} void *_PyPegen_arguments_parsing_error(Parser *, expr_ty); void *_PyPegen_nonparen_genexp_in_call(Parser *p, expr_ty args); From webhook-mailer at python.org Sun Jun 21 04:08:10 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 21 Jun 2020 08:08:10 -0000 Subject: [Python-checkins] bpo-41058: Use source file encoding in pdb.find_function(). (GH-21010) Message-ID: https://github.com/python/cpython/commit/19fcffa92773e008e4f5efb80047420a0cfafeec commit: 19fcffa92773e008e4f5efb80047420a0cfafeec branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-21T11:07:50+03:00 summary: bpo-41058: Use source file encoding in pdb.find_function(). 
(GH-21010) files: A Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst M Lib/pdb.py M Lib/test/test_pdb.py diff --git a/Lib/pdb.py b/Lib/pdb.py index 701386e8b96c2..081023526c0ea 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -79,6 +79,7 @@ import pprint import signal import inspect +import tokenize import traceback import linecache @@ -93,7 +94,7 @@ class Restart(Exception): def find_function(funcname, filename): cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname)) try: - fp = open(filename) + fp = tokenize.open(filename) except OSError: return None # consumer of this info expects the first line to be 1 diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index fcb7e4e6072cb..1e8b12a9af0d8 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -5,6 +5,7 @@ import pdb import sys import types +import codecs import unittest import subprocess import textwrap @@ -1226,9 +1227,7 @@ def run_pdb_module(self, script, commands): return self._run_pdb(['-m', self.module_name], commands) def _assert_find_function(self, file_content, func_name, expected): - file_content = textwrap.dedent(file_content) - - with open(support.TESTFN, 'w') as f: + with open(support.TESTFN, 'wb') as f: f.write(file_content) expected = None if not expected else ( @@ -1237,22 +1236,49 @@ def _assert_find_function(self, file_content, func_name, expected): expected, pdb.find_function(func_name, support.TESTFN)) def test_find_function_empty_file(self): - self._assert_find_function('', 'foo', None) + self._assert_find_function(b'', 'foo', None) def test_find_function_found(self): self._assert_find_function( """\ - def foo(): - pass +def foo(): + pass - def bar(): - pass +def b?r(): + pass - def quux(): - pass - """, - 'bar', - ('bar', 4), +def quux(): + pass +""".encode(), + 'b?r', + ('b?r', 4), + ) + + def test_find_function_found_with_encoding_cookie(self): + self._assert_find_function( + """\ +# coding: iso-8859-15 +def foo(): + pass + +def b?r(): + pass + +def quux(): + pass +""".encode('iso-8859-15'), + 'b?r', + ('b?r', 5), + ) + + def test_find_function_found_with_bom(self): + self._assert_find_function( + codecs.BOM_UTF8 + """\ +def b?r(): + pass +""".encode(), + 'b?r', + ('b?r', 1), ) def test_issue7964(self): diff --git a/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst b/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst new file mode 100644 index 0000000000000..6ac90098aa52b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst @@ -0,0 +1 @@ +:func:`pdb.find_function` now correctly determines the source file encoding. From webhook-mailer at python.org Sun Jun 21 04:11:22 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 21 Jun 2020 08:11:22 -0000 Subject: [Python-checkins] bpo-41055: Remove outdated tests for the tp_print slot. (GH-21006) Message-ID: https://github.com/python/cpython/commit/f9bab74d5b34c64cf061e1629ff5f3092a4ca9b3 commit: f9bab74d5b34c64cf061e1629ff5f3092a4ca9b3 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-21T11:11:17+03:00 summary: bpo-41055: Remove outdated tests for the tp_print slot. 
(GH-21006) files: M Lib/test/list_tests.py M Lib/test/test_bool.py M Lib/test/test_complex.py M Lib/test/test_defaultdict.py M Lib/test/test_deque.py M Lib/test/test_descr.py M Lib/test/test_set.py M Lib/test/test_unicode.py diff --git a/Lib/test/list_tests.py b/Lib/test/list_tests.py index 44bc2ae6573c1..f7eea88c54a6a 100644 --- a/Lib/test/list_tests.py +++ b/Lib/test/list_tests.py @@ -66,20 +66,6 @@ def test_repr_deep(self): a = self.type2test([a]) self.assertRaises(RecursionError, repr, a) - def test_print(self): - d = self.type2test(range(200)) - d.append(d) - d.extend(range(200,400)) - d.append(d) - d.append(400) - try: - with open(support.TESTFN, "w") as fo: - fo.write(str(d)) - with open(support.TESTFN, "r") as fo: - self.assertEqual(fo.read(), repr(d)) - finally: - os.remove(support.TESTFN) - def test_set_subscript(self): a = self.type2test(range(20)) self.assertRaises(ValueError, a.__setitem__, slice(0, 10, 0), [1,2,3]) diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py index 909a59a9d2af6..4c6fba42c0c57 100644 --- a/Lib/test/test_bool.py +++ b/Lib/test/test_bool.py @@ -18,20 +18,11 @@ class C(bool): self.assertRaises(TypeError, int.__new__, bool, 0) - def test_print(self): - try: - with open(support.TESTFN, "w") as fo: - print(False, True, file=fo) - with open(support.TESTFN, "r") as fi: - self.assertEqual(fi.read(), 'False True\n') - finally: - os.remove(support.TESTFN) - def test_repr(self): self.assertEqual(repr(False), 'False') self.assertEqual(repr(True), 'True') - self.assertEqual(eval(repr(False)), False) - self.assertEqual(eval(repr(True)), True) + self.assertIs(eval(repr(False)), False) + self.assertIs(eval(repr(True)), True) def test_str(self): self.assertEqual(str(False), 'False') diff --git a/Lib/test/test_complex.py b/Lib/test/test_complex.py index dee5c7fa308bd..d1f241f7a60c9 100644 --- a/Lib/test/test_complex.py +++ b/Lib/test/test_complex.py @@ -500,22 +500,6 @@ def test(v, expected, test_fn=self.assertEqual): def test_neg(self): self.assertEqual(-(1+6j), -1-6j) - def test_file(self): - a = 3.33+4.43j - b = 5.1+2.3j - - fo = None - try: - fo = open(support.TESTFN, "w") - print(a, b, file=fo) - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), ("%s %s\n" % (a, b))) - finally: - if (fo is not None) and (not fo.closed): - fo.close() - support.unlink(support.TESTFN) - def test_getnewargs(self): self.assertEqual((1+2j).__getnewargs__(), (1.0, 2.0)) self.assertEqual((1-2j).__getnewargs__(), (1.0, -2.0)) diff --git a/Lib/test/test_defaultdict.py b/Lib/test/test_defaultdict.py index b48c649fce6ba..68fc449780a3d 100644 --- a/Lib/test/test_defaultdict.py +++ b/Lib/test/test_defaultdict.py @@ -72,27 +72,6 @@ def foo(): return 43 d3[13] self.assertEqual(repr(d3), "defaultdict(%s, {13: 43})" % repr(foo)) - def test_print(self): - d1 = defaultdict() - def foo(): return 42 - d2 = defaultdict(foo, {1: 2}) - # NOTE: We can't use tempfile.[Named]TemporaryFile since this - # code must exercise the tp_print C code, which only gets - # invoked for *real* files. - tfn = tempfile.mktemp() - try: - f = open(tfn, "w+") - try: - print(d1, file=f) - print(d2, file=f) - f.seek(0) - self.assertEqual(f.readline(), repr(d1) + "\n") - self.assertEqual(f.readline(), repr(d2) + "\n") - finally: - f.close() - finally: - os.remove(tfn) - def test_copy(self): d1 = defaultdict() d2 = d1.copy() @@ -160,18 +139,6 @@ def _factory(self): r"sub\(, \{\}\)") - # NOTE: printing a subclass of a builtin type does not call its - # tp_print slot. 
So this part is essentially the same test as above. - tfn = tempfile.mktemp() - try: - f = open(tfn, "w+") - try: - print(d, file=f) - finally: - f.close() - finally: - os.remove(tfn) - def test_callable_arg(self): self.assertRaises(TypeError, defaultdict, {}) diff --git a/Lib/test/test_deque.py b/Lib/test/test_deque.py index c0f7138254f3f..93cc6ca4f44ec 100644 --- a/Lib/test/test_deque.py +++ b/Lib/test/test_deque.py @@ -66,28 +66,9 @@ def test_maxlen(self): self.assertEqual(list(d), [7, 8, 9]) d = deque(range(200), maxlen=10) d.append(d) - support.unlink(support.TESTFN) - fo = open(support.TESTFN, "w") - try: - fo.write(str(d)) - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), repr(d)) - finally: - fo.close() - support.unlink(support.TESTFN) - + self.assertEqual(repr(d)[-30:], ', 198, 199, [...]], maxlen=10)') d = deque(range(10), maxlen=None) self.assertEqual(repr(d), 'deque([0, 1, 2, 3, 4, 5, 6, 7, 8, 9])') - fo = open(support.TESTFN, "w") - try: - fo.write(str(d)) - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), repr(d)) - finally: - fo.close() - support.unlink(support.TESTFN) def test_maxlen_zero(self): it = iter(range(100)) @@ -545,21 +526,7 @@ def test_repr(self): e = eval(repr(d)) self.assertEqual(list(d), list(e)) d.append(d) - self.assertIn('...', repr(d)) - - def test_print(self): - d = deque(range(200)) - d.append(d) - try: - support.unlink(support.TESTFN) - fo = open(support.TESTFN, "w") - print(d, file=fo, end='') - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), repr(d)) - finally: - fo.close() - support.unlink(support.TESTFN) + self.assertEqual(repr(d)[-20:], '7, 198, 199, [...]])') def test_init(self): self.assertRaises(TypeError, deque, 'abc', 2, 3); diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py index 96cc8de2d98fb..7bb6f2bb4b30b 100644 --- a/Lib/test/test_descr.py +++ b/Lib/test/test_descr.py @@ -3552,13 +3552,6 @@ def __repr__(self): self.assertEqual(o.__str__(), '41') self.assertEqual(o.__repr__(), 'A repr') - capture = io.StringIO() - # Calling str() or not exercises different internal paths. - print(o, file=capture) - print(str(o), file=capture) - self.assertEqual(capture.getvalue(), '41\n41\n') - capture.close() - def test_keyword_arguments(self): # Testing keyword arguments to __init__, __call__... 
def f(a): return a diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py index e4766ab190be0..9851a998983f8 100644 --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -317,20 +317,6 @@ def test_cyclical_repr(self): name = repr(s).partition('(')[0] # strip class name self.assertEqual(repr(s), '%s({%s(...)})' % (name, name)) - def test_cyclical_print(self): - w = ReprWrapper() - s = self.thetype([w]) - w.value = s - fo = open(support.TESTFN, "w") - try: - fo.write(str(s)) - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), repr(s)) - finally: - fo.close() - support.unlink(support.TESTFN) - def test_do_not_rehash_dict_keys(self): n = 10 d = dict.fromkeys(map(HashCountingInt, range(n))) @@ -803,17 +789,6 @@ def check_repr_against_values(self): sorted_repr_values.sort() self.assertEqual(result, sorted_repr_values) - def test_print(self): - try: - fo = open(support.TESTFN, "w") - fo.write(str(self.set)) - fo.close() - fo = open(support.TESTFN, "r") - self.assertEqual(fo.read(), repr(self.set)) - finally: - fo.close() - support.unlink(support.TESTFN) - def test_length(self): self.assertEqual(len(self.set), self.length) diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 2ee4e64d63530..6e397161fd98d 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -2215,22 +2215,6 @@ def test_concatenation(self): self.assertEqual(("abc" "def" "ghi"), "abcdefghi") self.assertEqual(("abc" "def" "ghi"), "abcdefghi") - def test_printing(self): - class BitBucket: - def write(self, text): - pass - - out = BitBucket() - print('abc', file=out) - print('abc', 'def', file=out) - print('abc', 'def', file=out) - print('abc', 'def', file=out) - print('abc\n', file=out) - print('abc\n', end=' ', file=out) - print('abc\n', end=' ', file=out) - print('def\n', file=out) - print('def\n', file=out) - def test_ucs4(self): x = '\U00100000' y = x.encode("raw-unicode-escape").decode("raw-unicode-escape") From webhook-mailer at python.org Sun Jun 21 05:45:06 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Sun, 21 Jun 2020 09:45:06 -0000 Subject: [Python-checkins] bpo-41052: Opt out serialization/deserialization for _random.Random (GH-21002) Message-ID: https://github.com/python/cpython/commit/6989af0bc7ea1e9a1acea16794e6f723d7b44110 commit: 6989af0bc7ea1e9a1acea16794e6f723d7b44110 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-21T18:44:58+09:00 summary: bpo-41052: Opt out serialization/deserialization for _random.Random (GH-21002) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-20-22-46-18.bpo-41052.46MPeF.rst M Lib/test/test_random.py M Modules/_randommodule.c M Modules/clinic/_randommodule.c.h diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py index a3710f4aa48a6..a80e71e67e4c6 100644 --- a/Lib/test/test_random.py +++ b/Lib/test/test_random.py @@ -5,6 +5,8 @@ import time import pickle import warnings +import test.support + from functools import partial from math import log, exp, pi, fsum, sin, factorial from test import support @@ -372,6 +374,14 @@ def test_pickling(self): restoredseq = [newgen.random() for i in range(10)] self.assertEqual(origseq, restoredseq) + @test.support.cpython_only + def test_bug_41052(self): + # _random.Random should not be allowed to serialization + import _random + for proto in range(pickle.HIGHEST_PROTOCOL + 1): + r = _random.Random() + self.assertRaises(TypeError, pickle.dumps, r, proto) + def test_bug_1727780(self): # verify that version-2-pickles can be loaded # fine, whether they are 
created on 32-bit or 64-bit diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-22-46-18.bpo-41052.46MPeF.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-22-46-18.bpo-41052.46MPeF.rst new file mode 100644 index 0000000000000..82969bf4a7894 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-22-46-18.bpo-41052.46MPeF.rst @@ -0,0 +1 @@ +Opt out serialization/deserialization for _random.Random diff --git a/Modules/_randommodule.c b/Modules/_randommodule.c index 3e3139e4990cc..b8bc0449c1b1b 100644 --- a/Modules/_randommodule.c +++ b/Modules/_randommodule.c @@ -535,12 +535,30 @@ random_new(PyTypeObject *type, PyObject *args, PyObject *kwds) return (PyObject *)self; } + +/*[clinic input] + +_random.Random.__reduce__ + +[clinic start generated code]*/ + +static PyObject * +_random_Random___reduce___impl(RandomObject *self) +/*[clinic end generated code: output=ddea0dcdb60ffd6d input=bd38ec35fd157e0f]*/ +{ + PyErr_Format(PyExc_TypeError, + "cannot pickle %s object", + Py_TYPE(self)->tp_name); + return NULL; +} + static PyMethodDef random_methods[] = { _RANDOM_RANDOM_RANDOM_METHODDEF _RANDOM_RANDOM_SEED_METHODDEF _RANDOM_RANDOM_GETSTATE_METHODDEF _RANDOM_RANDOM_SETSTATE_METHODDEF _RANDOM_RANDOM_GETRANDBITS_METHODDEF + _RANDOM_RANDOM___REDUCE___METHODDEF {NULL, NULL} /* sentinel */ }; diff --git a/Modules/clinic/_randommodule.c.h b/Modules/clinic/_randommodule.c.h index b3cd435b6f204..3322a370288c3 100644 --- a/Modules/clinic/_randommodule.c.h +++ b/Modules/clinic/_randommodule.c.h @@ -109,4 +109,21 @@ _random_Random_getrandbits(RandomObject *self, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=cc8a23b2757dc6ba input=a9049054013a1b77]*/ + +PyDoc_STRVAR(_random_Random___reduce____doc__, +"__reduce__($self, /)\n" +"--\n" +"\n"); + +#define _RANDOM_RANDOM___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)_random_Random___reduce__, METH_NOARGS, _random_Random___reduce____doc__}, + +static PyObject * +_random_Random___reduce___impl(RandomObject *self); + +static PyObject * +_random_Random___reduce__(RandomObject *self, PyObject *Py_UNUSED(ignored)) +{ + return _random_Random___reduce___impl(self); +} +/*[clinic end generated code: output=450f0961c2c92389 input=a9049054013a1b77]*/ From webhook-mailer at python.org Sun Jun 21 15:11:38 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 21 Jun 2020 19:11:38 -0000 Subject: [Python-checkins] bpo-41056: Fix a NULL pointer dereference on MemoryError within the ssl module. (GH-21009) Message-ID: https://github.com/python/cpython/commit/10bf6e482328f622f4b2659e4ad5e3d88f57ba58 commit: 10bf6e482328f622f4b2659e4ad5e3d88f57ba58 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-21T12:11:29-07:00 summary: bpo-41056: Fix a NULL pointer dereference on MemoryError within the ssl module. (GH-21009) Detected by Coverity. (cherry picked from commit eb0d5c38de7f970d8cd8524f4163d831c7720f51) Co-authored-by: Gregory P. 
Smith files: A Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst M Modules/_ssl/debughelpers.c diff --git a/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst b/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst new file mode 100644 index 0000000000000..1776f0d1cf8a3 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst @@ -0,0 +1 @@ +Fix a NULL pointer dereference within the ssl module during a MemoryError in the keylog callback. (discovered by Coverity) \ No newline at end of file diff --git a/Modules/_ssl/debughelpers.c b/Modules/_ssl/debughelpers.c index 858b3d7955c9c..b840da2f663af 100644 --- a/Modules/_ssl/debughelpers.c +++ b/Modules/_ssl/debughelpers.c @@ -125,6 +125,12 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) threadstate = PyGILState_Ensure(); + ssl_obj = (PySSLSocket *)SSL_get_app_data(ssl); + assert(PySSLSocket_Check(ssl_obj)); + if (ssl_obj->ctx->keylog_bio == NULL) { + return; + } + /* Allocate a static lock to synchronize writes to keylog file. * The lock is neither released on exit nor on fork(). The lock is * also shared between all SSLContexts although contexts may write to @@ -141,12 +147,6 @@ _PySSL_keylog_callback(const SSL *ssl, const char *line) } } - ssl_obj = (PySSLSocket *)SSL_get_app_data(ssl); - assert(PySSLSocket_Check(ssl_obj)); - if (ssl_obj->ctx->keylog_bio == NULL) { - return; - } - PySSL_BEGIN_ALLOW_THREADS PyThread_acquire_lock(lock, 1); res = BIO_printf(ssl_obj->ctx->keylog_bio, "%s\n", line); From webhook-mailer at python.org Sun Jun 21 15:36:32 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 21 Jun 2020 19:36:32 -0000 Subject: [Python-checkins] bpo-41058: Use source file encoding in pdb.find_function(). (GH-21010) Message-ID: https://github.com/python/cpython/commit/14195597b3a877209c20d00e0ec844234e624d13 commit: 14195597b3a877209c20d00e0ec844234e624d13 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-21T12:36:23-07:00 summary: bpo-41058: Use source file encoding in pdb.find_function(). 
(GH-21010) (cherry picked from commit 19fcffa92773e008e4f5efb80047420a0cfafeec) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst M Lib/pdb.py M Lib/test/test_pdb.py diff --git a/Lib/pdb.py b/Lib/pdb.py index bf503f1e73ee1..931a039446187 100755 --- a/Lib/pdb.py +++ b/Lib/pdb.py @@ -79,6 +79,7 @@ import pprint import signal import inspect +import tokenize import traceback import linecache @@ -93,7 +94,7 @@ class Restart(Exception): def find_function(funcname, filename): cre = re.compile(r'def\s+%s\s*[(]' % re.escape(funcname)) try: - fp = open(filename) + fp = tokenize.open(filename) except OSError: return None # consumer of this info expects the first line to be 1 diff --git a/Lib/test/test_pdb.py b/Lib/test/test_pdb.py index 4c38e919a83b7..0e7ae1d86ed80 100644 --- a/Lib/test/test_pdb.py +++ b/Lib/test/test_pdb.py @@ -5,6 +5,7 @@ import pdb import sys import types +import codecs import unittest import subprocess import textwrap @@ -1226,9 +1227,7 @@ def run_pdb_module(self, script, commands): return self._run_pdb(['-m', self.module_name], commands) def _assert_find_function(self, file_content, func_name, expected): - file_content = textwrap.dedent(file_content) - - with open(support.TESTFN, 'w') as f: + with open(support.TESTFN, 'wb') as f: f.write(file_content) expected = None if not expected else ( @@ -1237,22 +1236,49 @@ def _assert_find_function(self, file_content, func_name, expected): expected, pdb.find_function(func_name, support.TESTFN)) def test_find_function_empty_file(self): - self._assert_find_function('', 'foo', None) + self._assert_find_function(b'', 'foo', None) def test_find_function_found(self): self._assert_find_function( """\ - def foo(): - pass +def foo(): + pass - def bar(): - pass +def b?r(): + pass - def quux(): - pass - """, - 'bar', - ('bar', 4), +def quux(): + pass +""".encode(), + 'b?r', + ('b?r', 4), + ) + + def test_find_function_found_with_encoding_cookie(self): + self._assert_find_function( + """\ +# coding: iso-8859-15 +def foo(): + pass + +def b?r(): + pass + +def quux(): + pass +""".encode('iso-8859-15'), + 'b?r', + ('b?r', 5), + ) + + def test_find_function_found_with_bom(self): + self._assert_find_function( + codecs.BOM_UTF8 + """\ +def b?r(): + pass +""".encode(), + 'b?r', + ('b?r', 1), ) def test_issue7964(self): diff --git a/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst b/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst new file mode 100644 index 0000000000000..6ac90098aa52b --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst @@ -0,0 +1 @@ +:func:`pdb.find_function` now correctly determines the source file encoding. From webhook-mailer at python.org Sun Jun 21 19:47:51 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sun, 21 Jun 2020 23:47:51 -0000 Subject: [Python-checkins] bpo-40939: Rename PyPegen* functions to PyParser* (GH-21016) Message-ID: https://github.com/python/cpython/commit/564cd187677ae8d1488c4d8ae649aea34ebbde07 commit: 564cd187677ae8d1488c4d8ae649aea34ebbde07 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-22T00:47:46+01:00 summary: bpo-40939: Rename PyPegen* functions to PyParser* (GH-21016) Rename PyPegen* functions to PyParser*, so that we can remove the old set of PyParser* functions that were using the old parser. 
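[Editor's illustration, not part of the archived mail] Looking back at the pdb change above (bpo-41058), a minimal sketch of the case it enables. The file name, coding cookie and function name below are illustrative only; the point is that pdb.find_function() now reads the source with tokenize.open(), so a coding cookie (or BOM) is honoured.

import pdb

# A throwaway module saved in ISO-8859-15 with a coding cookie and a non-ASCII name.
source = (
    "# coding: iso-8859-15\n"
    "def caf\u00e9():\n"
    "    pass\n"
).encode("iso-8859-15")

with open("pdb_demo.py", "wb") as f:
    f.write(source)

# find_function() decodes the file using its coding cookie and returns
# (function name, line number), here the definition on line 2.
print(pdb.find_function("caf\u00e9", "pdb_demo.py"))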
files: A Include/parser_interface.h A Misc/NEWS.d/next/Core and Builtins/2020-06-20-19-27-47.bpo-40939.jxJ4yn.rst D Include/internal/pegen_interface.h M Include/Python.h M Include/pythonrun.h M Makefile.pre.in M PCbuild/pythoncore.vcxproj M Parser/peg_api.c M Python/pythonrun.c diff --git a/Include/Python.h b/Include/Python.h index dcd0a57ac1f03..57f71d41d8d47 100644 --- a/Include/Python.h +++ b/Include/Python.h @@ -141,6 +141,7 @@ #include "modsupport.h" #include "compile.h" #include "pythonrun.h" +#include "parser_interface.h" #include "pylifecycle.h" #include "ceval.h" #include "sysmodule.h" diff --git a/Include/internal/pegen_interface.h b/Include/parser_interface.h similarity index 54% rename from Include/internal/pegen_interface.h rename to Include/parser_interface.h index ee4c77ec00676..1c6576d926d8d 100644 --- a/Include/internal/pegen_interface.h +++ b/Include/parser_interface.h @@ -4,43 +4,49 @@ extern "C" { #endif -#ifndef Py_BUILD_CORE -# error "this header requires Py_BUILD_CORE define" -#endif - #include "Python.h" -#include "Python-ast.h" -PyAPI_FUNC(mod_ty) PyPegen_ASTFromString( +#ifndef Py_LIMITED_API +PyAPI_FUNC(struct _mod *) PyParser_ASTFromString( const char *str, const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromStringObject( +PyAPI_FUNC(struct _mod *) PyParser_ASTFromStringObject( const char *str, PyObject* filename, int mode, PyCompilerFlags *flags, PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromFileObject( +PyAPI_FUNC(struct _mod *) PyParser_ASTFromFile( FILE *fp, - PyObject *filename_ob, + const char *filename, + const char* enc, int mode, + const char *ps1, + const char *ps2, + PyCompilerFlags *flags, + int *errcode, + PyArena *arena); +PyAPI_FUNC(struct _mod *) PyParser_ASTFromFileObject( + FILE *fp, + PyObject *filename_ob, const char *enc, + int mode, const char *ps1, const char *ps2, PyCompilerFlags *flags, int *errcode, PyArena *arena); -PyAPI_FUNC(mod_ty) PyPegen_ASTFromFilename( +PyAPI_FUNC(struct _mod *) PyParser_ASTFromFilename( const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena); - +#endif /* !Py_LIMITED_API */ #ifdef __cplusplus } #endif -#endif /* !Py_PEGENINTERFACE*/ +#endif /* !Py_PEGENINTERFACE */ diff --git a/Include/pythonrun.h b/Include/pythonrun.h index 46091e0921633..d43734b5a12ff 100644 --- a/Include/pythonrun.h +++ b/Include/pythonrun.h @@ -32,57 +32,7 @@ PyAPI_FUNC(int) PyRun_InteractiveLoopFlags( const char *filename, /* decoded from the filesystem encoding */ PyCompilerFlags *flags); -PyAPI_FUNC(struct _mod *) PyParser_ASTFromString( - const char *s, - const char *filename, /* decoded from the filesystem encoding */ - int start, - PyCompilerFlags *flags, - PyArena *arena); -PyAPI_FUNC(struct _mod *) PyParser_ASTFromStringObject( - const char *s, - PyObject *filename, - int start, - PyCompilerFlags *flags, - PyArena *arena); -PyAPI_FUNC(struct _mod *) PyParser_ASTFromFile( - FILE *fp, - const char *filename, /* decoded from the filesystem encoding */ - const char* enc, - int start, - const char *ps1, - const char *ps2, - PyCompilerFlags *flags, - int *errcode, - PyArena *arena); -PyAPI_FUNC(struct _mod *) PyParser_ASTFromFileObject( - FILE *fp, - PyObject *filename, - const char* enc, - int start, - const char *ps1, - const char *ps2, - PyCompilerFlags *flags, - int *errcode, - PyArena *arena); -#endif -#ifndef PyParser_SimpleParseString -#define PyParser_SimpleParseString(S, B) \ - PyParser_SimpleParseStringFlags(S, B, 0) -#define 
PyParser_SimpleParseFile(FP, S, B) \ - PyParser_SimpleParseFileFlags(FP, S, B, 0) -#endif -PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlags(const char *, int, - int); -#if !defined(Py_LIMITED_API) || Py_LIMITED_API+0 >= 0x03030000 -PyAPI_FUNC(struct _node *) PyParser_SimpleParseStringFlagsFilename(const char *, - const char *, - int, int); -#endif -PyAPI_FUNC(struct _node *) PyParser_SimpleParseFileFlags(FILE *, const char *, - int, int); - -#ifndef Py_LIMITED_API PyAPI_FUNC(PyObject *) PyRun_StringFlags(const char *, int, PyObject *, PyObject *, PyCompilerFlags *); diff --git a/Makefile.pre.in b/Makefile.pre.in index fc6dc434e0a18..24dddcf56f8fa 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -307,7 +307,7 @@ PEGEN_OBJS= \ PEGEN_HEADERS= \ - $(srcdir)/Include/internal/pegen_interface.h \ + $(srcdir)/Include/parser_interface.h \ $(srcdir)/Parser/pegen.h \ $(srcdir)/Parser/string_parser.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-19-27-47.bpo-40939.jxJ4yn.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-19-27-47.bpo-40939.jxJ4yn.rst new file mode 100644 index 0000000000000..7024dfe47ae1c --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-19-27-47.bpo-40939.jxJ4yn.rst @@ -0,0 +1 @@ +Rename `PyPegen*` functions to `PyParser*`, so that we can remove the old set of `PyParser*` functions that were using the old parser, but keep everything backwards-compatible. \ No newline at end of file diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 0f9110e08d65b..fc99d7748a01f 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -162,7 +162,6 @@ - @@ -213,6 +212,7 @@ + diff --git a/Parser/peg_api.c b/Parser/peg_api.c index b947c78076545..8381d5e86b0db 100644 --- a/Parser/peg_api.c +++ b/Parser/peg_api.c @@ -1,23 +1,23 @@ -#include "pegen_interface.h" +#include "parser_interface.h" #include "tokenizer.h" #include "pegen.h" mod_ty -PyPegen_ASTFromString(const char *str, const char *filename, int mode, +PyParser_ASTFromString(const char *str, const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena) { PyObject *filename_ob = PyUnicode_FromString(filename); if (filename_ob == NULL) { return NULL; } - mod_ty result = PyPegen_ASTFromStringObject(str, filename_ob, mode, flags, arena); + mod_ty result = PyParser_ASTFromStringObject(str, filename_ob, mode, flags, arena); Py_XDECREF(filename_ob); return result; } mod_ty -PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, +PyParser_ASTFromStringObject(const char *str, PyObject* filename, int mode, PyCompilerFlags *flags, PyArena *arena) { if (PySys_Audit("compile", "yO", str, filename) < 0) { @@ -29,7 +29,7 @@ PyPegen_ASTFromStringObject(const char *str, PyObject* filename, int mode, } mod_ty -PyPegen_ASTFromFilename(const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena) +PyParser_ASTFromFilename(const char *filename, int mode, PyCompilerFlags *flags, PyArena *arena) { PyObject *filename_ob = PyUnicode_FromString(filename); if (filename_ob == NULL) { @@ -42,8 +42,23 @@ PyPegen_ASTFromFilename(const char *filename, int mode, PyCompilerFlags *flags, } mod_ty -PyPegen_ASTFromFileObject(FILE *fp, PyObject *filename_ob, int mode, - const char *enc, const char *ps1, const char* ps2, +PyParser_ASTFromFile(FILE *fp, const char *filename, const char *enc, + int mode, const char *ps1, const char* ps2, + PyCompilerFlags *flags, int *errcode, PyArena *arena) +{ + PyObject *filename_ob = PyUnicode_FromString(filename); + if 
(filename_ob == NULL) { + return NULL; + } + mod_ty result = PyParser_ASTFromFileObject(fp, filename_ob, enc, mode, + ps1, ps2, flags, errcode, arena); + Py_XDECREF(filename_ob); + return result; +} + +mod_ty +PyParser_ASTFromFileObject(FILE *fp, PyObject *filename_ob, const char *enc, + int mode, const char *ps1, const char* ps2, PyCompilerFlags *flags, int *errcode, PyArena *arena) { if (PySys_Audit("compile", "OO", Py_None, filename_ob) < 0) { diff --git a/Python/pythonrun.c b/Python/pythonrun.c index 04fad04227df9..ff80103050e4e 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -26,7 +26,7 @@ #include "symtable.h" // PySymtable_BuildObject() #include "marshal.h" // PyMarshal_ReadLongFromFile() -#include "pegen_interface.h" // PyPegen_ASTFrom* +#include "parser_interface.h" // PyParser_ASTFrom* #ifdef MS_WINDOWS # include "malloc.h" // alloca() @@ -205,8 +205,8 @@ PyRun_InteractiveOneObjectEx(FILE *fp, PyObject *filename, return -1; } - mod = PyPegen_ASTFromFileObject(fp, filename, Py_single_input, - enc, ps1, ps2, flags, &errcode, arena); + mod = PyParser_ASTFromFileObject(fp, filename, enc, Py_single_input, + ps1, ps2, flags, &errcode, arena); Py_XDECREF(v); Py_XDECREF(w); @@ -1026,7 +1026,7 @@ PyRun_StringFlags(const char *str, int start, PyObject *globals, if (arena == NULL) return NULL; - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); + mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); if (mod != NULL) ret = run_mod(mod, filename, globals, locals, flags, arena); @@ -1051,8 +1051,8 @@ PyRun_FileExFlags(FILE *fp, const char *filename_str, int start, PyObject *globa if (arena == NULL) goto exit; - mod = PyPegen_ASTFromFileObject(fp, filename, start, NULL, NULL, NULL, - flags, NULL, arena); + mod = PyParser_ASTFromFileObject(fp, filename, NULL, start, NULL, NULL, + flags, NULL, arena); if (closeit) fclose(fp); @@ -1200,7 +1200,7 @@ Py_CompileStringObject(const char *str, PyObject *filename, int start, if (arena == NULL) return NULL; - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); + mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); if (mod == NULL) { PyArena_Free(arena); return NULL; @@ -1303,7 +1303,7 @@ _Py_SymtableStringObjectFlags(const char *str, PyObject *filename, int start, Py if (arena == NULL) return NULL; - mod = PyPegen_ASTFromStringObject(str, filename, start, flags, arena); + mod = PyParser_ASTFromStringObject(str, filename, start, flags, arena); if (mod == NULL) { PyArena_Free(arena); return NULL; From webhook-mailer at python.org Sun Jun 21 19:59:52 2020 From: webhook-mailer at python.org (Joannah Nanjekye) Date: Sun, 21 Jun 2020 23:59:52 -0000 Subject: [Python-checkins] Skip tests to fix bot (GH-20777) Message-ID: https://github.com/python/cpython/commit/6f79838fc1cbc8a92df35f44f2fb327d61f70ea9 commit: 6f79838fc1cbc8a92df35f44f2fb327d61f70ea9 branch: master author: Joannah Nanjekye <33177550+nanjekyejoannah at users.noreply.github.com> committer: GitHub date: 2020-06-21T20:59:43-03:00 summary: Skip tests to fix bot (GH-20777) Co-authored-by: nanjekyejoannah files: M Lib/test/test__xxsubinterpreters.py M Lib/test/test_interpreters.py diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index 550a847616cdc..eab8f9f56c82a 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -473,6 +473,7 @@ def test_main(self): main = interpreters.get_main() self.assertTrue(interpreters.is_running(main)) + 
@unittest.skip('Fails on FreeBSD') def test_subinterpreter(self): interp = interpreters.create() self.assertFalse(interpreters.is_running(interp)) diff --git a/Lib/test/test_interpreters.py b/Lib/test/test_interpreters.py index 58258bb66af8a..8d44d497bd250 100644 --- a/Lib/test/test_interpreters.py +++ b/Lib/test/test_interpreters.py @@ -244,6 +244,7 @@ def test_main(self): main = interpreters.get_main() self.assertTrue(main.is_running()) + @unittest.skip('Fails on FreeBSD') def test_subinterpreter(self): interp = interpreters.create() self.assertFalse(interp.is_running()) @@ -371,6 +372,7 @@ def f(): t.start() t.join() + @unittest.skip('Fails on FreeBSD') def test_still_running(self): main, = interpreters.list_all() interp = interpreters.create() @@ -428,6 +430,7 @@ def test_fork(self): content = file.read() self.assertEqual(content, expected) + @unittest.skip('Fails on FreeBSD') def test_already_running(self): interp = interpreters.create() with _running(interp): From webhook-mailer at python.org Mon Jun 22 03:27:25 2020 From: webhook-mailer at python.org (Gregory P. Smith) Date: Mon, 22 Jun 2020 07:27:25 -0000 Subject: [Python-checkins] bpo-41056: Fix reference to deallocated stack in pathconfig (Coverity) (GH-21013) Message-ID: https://github.com/python/cpython/commit/81328f30703bd7225e7e73aedb0994a7293ce190 commit: 81328f30703bd7225e7e73aedb0994a7293ce190 branch: master author: Gregory P. Smith committer: GitHub date: 2020-06-22T00:27:20-07:00 summary: bpo-41056: Fix reference to deallocated stack in pathconfig (Coverity) (GH-21013) Reported by Coverity. (CID 1457554 RETURN_LOCAL) path0 is assigned as a pointer to this right before it goes out of scope. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst M Python/pathconfig.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst new file mode 100644 index 0000000000000..25f93c9da3105 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst @@ -0,0 +1 @@ +Fixes a reference to deallocated stack space during startup when constructing sys.path involving a relative symlink when code was supplied via -c. (discovered via Coverity) \ No newline at end of file diff --git a/Python/pathconfig.c b/Python/pathconfig.c index fe3ac3ee3d812..5c38041d7667b 100644 --- a/Python/pathconfig.c +++ b/Python/pathconfig.c @@ -686,6 +686,7 @@ _PyPathConfig_ComputeSysPath0(const PyWideStringList *argv, PyObject **path0_p) #ifdef HAVE_READLINK wchar_t link[MAXPATHLEN + 1]; int nr = 0; + wchar_t path0copy[2 * MAXPATHLEN + 1]; if (have_script_arg) { nr = _Py_wreadlink(path0, link, Py_ARRAY_LENGTH(link)); @@ -708,7 +709,6 @@ _PyPathConfig_ComputeSysPath0(const PyWideStringList *argv, PyObject **path0_p) } else { /* Must make a copy, path0copy has room for 2 * MAXPATHLEN */ - wchar_t path0copy[2 * MAXPATHLEN + 1]; wcsncpy(path0copy, path0, MAXPATHLEN); q = wcsrchr(path0copy, SEP); wcsncpy(q+1, link, MAXPATHLEN); From webhook-mailer at python.org Mon Jun 22 03:39:40 2020 From: webhook-mailer at python.org (Gregory P. Smith) Date: Mon, 22 Jun 2020 07:39:40 -0000 Subject: [Python-checkins] bpo-41056: Fix a possible MemoryError leak within zoneinfo. (GH-21007) Message-ID: https://github.com/python/cpython/commit/d780fa7931d8ce94994827232d7cca79b0be3bf1 commit: d780fa7931d8ce94994827232d7cca79b0be3bf1 branch: master author: Gregory P. 
Smith committer: GitHub date: 2020-06-22T00:39:28-07:00 summary: bpo-41056: Fix a possible MemoryError leak within zoneinfo. (GH-21007) This was detected by our Coverity scan as a REVERSE_INULL issue. Automerge-Triggered-By: @gpshead files: A Misc/NEWS.d/next/Library/2020-06-20-18-33-03.bpo-41056.gTH4Bq.rst M Modules/_zoneinfo.c diff --git a/Misc/NEWS.d/next/Library/2020-06-20-18-33-03.bpo-41056.gTH4Bq.rst b/Misc/NEWS.d/next/Library/2020-06-20-18-33-03.bpo-41056.gTH4Bq.rst new file mode 100644 index 0000000000000..0439d82a50ad1 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-18-33-03.bpo-41056.gTH4Bq.rst @@ -0,0 +1 @@ +Fixed an instance where a MemoryError within the zoneinfo module might not be reported or not reported at its source. (found by Coverity) \ No newline at end of file diff --git a/Modules/_zoneinfo.c b/Modules/_zoneinfo.c index e8b28319993a1..a2883495fe7fd 100644 --- a/Modules/_zoneinfo.c +++ b/Modules/_zoneinfo.c @@ -278,13 +278,11 @@ zoneinfo_new(PyTypeObject *type, PyObject *args, PyObject *kw) instance = PyObject_CallMethod(weak_cache, "setdefault", "OO", key, tmp); - ((PyZoneInfo_ZoneInfo *)instance)->source = SOURCE_CACHE; - Py_DECREF(tmp); - if (instance == NULL) { return NULL; } + ((PyZoneInfo_ZoneInfo *)instance)->source = SOURCE_CACHE; } update_strong_cache(type, key, instance); @@ -1622,7 +1620,7 @@ parse_abbr(const char *const p, PyObject **abbr) } *abbr = PyUnicode_FromStringAndSize(str_start, str_end - str_start); - if (abbr == NULL) { + if (*abbr == NULL) { return -1; } From webhook-mailer at python.org Mon Jun 22 03:41:53 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Mon, 22 Jun 2020 07:41:53 -0000 Subject: [Python-checkins] bpo-41061: Fix incorrect expressions in hashtable (GH-21028) Message-ID: https://github.com/python/cpython/commit/4901ea952691ad70aae21cfe04b6bd363b5a6aff commit: 4901ea952691ad70aae21cfe04b6bd363b5a6aff branch: master author: Christian Heimes committer: GitHub date: 2020-06-22T00:41:48-07:00 summary: bpo-41061: Fix incorrect expressions in hashtable (GH-21028) Signed-off-by: Christian Heimes files: A Misc/NEWS.d/next/Core and Builtins/2020-06-21-10-54-02.bpo-41061.AHf9MU.rst M Modules/_testinternalcapi.c M Python/hashtable.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-21-10-54-02.bpo-41061.AHf9MU.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-10-54-02.bpo-41061.AHf9MU.rst new file mode 100644 index 0000000000000..b5bb81621b7f2 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-10-54-02.bpo-41061.AHf9MU.rst @@ -0,0 +1 @@ +Fix incorrect expressions and asserts in hashtable code and tests. 
diff --git a/Modules/_testinternalcapi.c b/Modules/_testinternalcapi.c index 7970e2f4f443f..ad74af8363ef4 100644 --- a/Modules/_testinternalcapi.c +++ b/Modules/_testinternalcapi.c @@ -197,8 +197,8 @@ test_hashtable(PyObject *self, PyObject *Py_UNUSED(args)) for (key='a'; key <= 'z'; key++) { _Py_hashtable_entry_t *entry = _Py_hashtable_get_entry(table, TO_PTR(key)); assert(entry != NULL); - assert(entry->key = TO_PTR(key)); - assert(entry->value = TO_PTR(VALUE(key))); + assert(entry->key == TO_PTR(key)); + assert(entry->value == TO_PTR(VALUE(key))); } // Test _Py_hashtable_get() diff --git a/Python/hashtable.c b/Python/hashtable.c index b92e8ca08c7e1..09501de199b0e 100644 --- a/Python/hashtable.c +++ b/Python/hashtable.c @@ -133,7 +133,7 @@ _Py_hashtable_get_entry_generic(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = ht->hash_func(key); size_t index = key_hash & (ht->nbuckets - 1); - _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { return NULL; @@ -155,7 +155,7 @@ _Py_hashtable_get_entry_ptr(_Py_hashtable_t *ht, const void *key) { Py_uhash_t key_hash = _Py_hashtable_hash_ptr(key); size_t index = key_hash & (ht->nbuckets - 1); - _Py_hashtable_entry_t *entry = entry = TABLE_HEAD(ht, index); + _Py_hashtable_entry_t *entry = TABLE_HEAD(ht, index); while (1) { if (entry == NULL) { return NULL; From webhook-mailer at python.org Mon Jun 22 03:43:40 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 22 Jun 2020 07:43:40 -0000 Subject: [Python-checkins] bpo-40824: Do not mask errors in __iter__ in "in" and the operator module. (GH-20537) Message-ID: https://github.com/python/cpython/commit/cafe1b6e9d3594a34aba50e872d4198296ffaadf commit: cafe1b6e9d3594a34aba50e872d4198296ffaadf branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-22T10:43:35+03:00 summary: bpo-40824: Do not mask errors in __iter__ in "in" and the operator module. (GH-20537) Unexpected errors in calling the __iter__ method are no longer masked by TypeError in the "in" operator and functions operator.contains(), operator.indexOf() and operator.countOf(). 
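[Editor's illustration, not part of the archived mail] A rough sketch of the behavior change described in this summary; the BadIterable class and message mirror the new tests and are illustrative only.

import operator

class BadIterable:
    # __iter__ fails with an error unrelated to iterability.
    def __iter__(self):
        raise ZeroDivisionError("broken iterable")

try:
    1 in BadIterable()
except ZeroDivisionError:
    # Previously re-raised as: TypeError: argument of type 'BadIterable' is not iterable
    print("original ZeroDivisionError propagated from 'in'")

try:
    operator.indexOf(BadIterable(), 1)
except ZeroDivisionError:
    print("original ZeroDivisionError propagated from operator.indexOf()")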
files: A Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst M Lib/test/test_iter.py M Lib/test/test_operator.py M Objects/abstract.c diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index 6aceda23e9bd8..524346939886d 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -76,6 +76,10 @@ def __getitem__(self, i): return i __iter__ = None +class BadIterableClass: + def __iter__(self): + raise ZeroDivisionError + # Main test suite class TestCase(unittest.TestCase): @@ -658,6 +662,7 @@ def test_in_and_not_in(self): self.assertRaises(TypeError, lambda: 3 in 12) self.assertRaises(TypeError, lambda: 3 not in map) + self.assertRaises(ZeroDivisionError, lambda: 3 in BadIterableClass()) d = {"one": 1, "two": 2, "three": 3, 1j: 2j} for k in d: @@ -740,6 +745,7 @@ def test_indexOf(self): self.assertRaises(TypeError, indexOf, 42, 1) self.assertRaises(TypeError, indexOf, indexOf, indexOf) + self.assertRaises(ZeroDivisionError, indexOf, BadIterableClass(), 1) f = open(TESTFN, "w") try: @@ -1027,6 +1033,7 @@ def test_free_after_iterating(self): def test_error_iter(self): for typ in (DefaultIterClass, NoIterClass): self.assertRaises(TypeError, iter, typ()) + self.assertRaises(ZeroDivisionError, iter, BadIterableClass()) def test_main(): diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index f46d94a226717..29f5e4275c55e 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -35,6 +35,10 @@ def __mul__(self, other): def __rmul__(self, other): return other * self.lst +class BadIterable: + def __iter__(self): + raise ZeroDivisionError + class OperatorTestCase: def test_lt(self): @@ -142,6 +146,7 @@ def test_countOf(self): operator = self.module self.assertRaises(TypeError, operator.countOf) self.assertRaises(TypeError, operator.countOf, None, None) + self.assertRaises(ZeroDivisionError, operator.countOf, BadIterable(), 1) self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 3), 1) self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 5), 0) @@ -176,6 +181,7 @@ def test_indexOf(self): operator = self.module self.assertRaises(TypeError, operator.indexOf) self.assertRaises(TypeError, operator.indexOf, None, None) + self.assertRaises(ZeroDivisionError, operator.indexOf, BadIterable(), 1) self.assertEqual(operator.indexOf([4, 3, 2, 1], 3), 1) self.assertRaises(ValueError, operator.indexOf, [4, 3, 2, 1], 0) @@ -258,6 +264,7 @@ def test_contains(self): operator = self.module self.assertRaises(TypeError, operator.contains) self.assertRaises(TypeError, operator.contains, None, None) + self.assertRaises(ZeroDivisionError, operator.contains, BadIterable(), 1) self.assertTrue(operator.contains(range(4), 2)) self.assertFalse(operator.contains(range(4), 5)) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst new file mode 100644 index 0000000000000..73c593c04a0da --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst @@ -0,0 +1,4 @@ +Unexpected errors in calling the ``__iter__`` method are no longer masked by +``TypeError`` in the :keyword:`in` operator and functions +:func:`~operator.contains`, :func:`~operator.indexOf` and +:func:`~operator.countOf` of the :mod:`operator` module. 
diff --git a/Objects/abstract.c b/Objects/abstract.c index 973c43fe7fda9..aac42c2898cdf 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -2083,7 +2083,9 @@ _PySequence_IterSearch(PyObject *seq, PyObject *obj, int operation) it = PyObject_GetIter(seq); if (it == NULL) { - type_error("argument of type '%.200s' is not iterable", seq); + if (PyErr_ExceptionMatches(PyExc_TypeError)) { + type_error("argument of type '%.200s' is not iterable", seq); + } return -1; } From webhook-mailer at python.org Mon Jun 22 03:43:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Jun 2020 07:43:46 -0000 Subject: [Python-checkins] bpo-41056: Fix reference to deallocated stack in pathconfig (Coverity) (GH-21013) Message-ID: https://github.com/python/cpython/commit/d5ee9b9940ba24120838b07061058afe931cfff1 commit: d5ee9b9940ba24120838b07061058afe931cfff1 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-22T00:43:41-07:00 summary: bpo-41056: Fix reference to deallocated stack in pathconfig (Coverity) (GH-21013) Reported by Coverity. (CID 1457554 RETURN_LOCAL) path0 is assigned as a pointer to this right before it goes out of scope. (cherry picked from commit 81328f30703bd7225e7e73aedb0994a7293ce190) Co-authored-by: Gregory P. Smith files: A Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst M Python/pathconfig.c diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst new file mode 100644 index 0000000000000..25f93c9da3105 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst @@ -0,0 +1 @@ +Fixes a reference to deallocated stack space during startup when constructing sys.path involving a relative symlink when code was supplied via -c. (discovered via Coverity) \ No newline at end of file diff --git a/Python/pathconfig.c b/Python/pathconfig.c index 258ff613a066c..bf180976b55ab 100644 --- a/Python/pathconfig.c +++ b/Python/pathconfig.c @@ -679,6 +679,7 @@ _PyPathConfig_ComputeSysPath0(const PyWideStringList *argv, PyObject **path0_p) #ifdef HAVE_READLINK wchar_t link[MAXPATHLEN + 1]; int nr = 0; + wchar_t path0copy[2 * MAXPATHLEN + 1]; if (have_script_arg) { nr = _Py_wreadlink(path0, link, Py_ARRAY_LENGTH(link)); @@ -701,7 +702,6 @@ _PyPathConfig_ComputeSysPath0(const PyWideStringList *argv, PyObject **path0_p) } else { /* Must make a copy, path0copy has room for 2 * MAXPATHLEN */ - wchar_t path0copy[2 * MAXPATHLEN + 1]; wcsncpy(path0copy, path0, MAXPATHLEN); q = wcsrchr(path0copy, SEP); wcsncpy(q+1, link, MAXPATHLEN); From webhook-mailer at python.org Mon Jun 22 04:21:15 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Jun 2020 08:21:15 -0000 Subject: [Python-checkins] bpo-40824: Do not mask errors in __iter__ in "in" and the operator module. (GH-20537) Message-ID: https://github.com/python/cpython/commit/b99824a8e14d94c3c5c29499a08fe70deb477d0c commit: b99824a8e14d94c3c5c29499a08fe70deb477d0c branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-22T01:21:04-07:00 summary: bpo-40824: Do not mask errors in __iter__ in "in" and the operator module. 
(GH-20537) Unexpected errors in calling the __iter__ method are no longer masked by TypeError in the "in" operator and functions operator.contains(), operator.indexOf() and operator.countOf(). (cherry picked from commit cafe1b6e9d3594a34aba50e872d4198296ffaadf) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst M Lib/test/test_iter.py M Lib/test/test_operator.py M Objects/abstract.c diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index 542b28419e2c8..22553d5d2881e 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -62,6 +62,10 @@ def __getitem__(self, i): return i __iter__ = None +class BadIterableClass: + def __iter__(self): + raise ZeroDivisionError + # Main test suite class TestCase(unittest.TestCase): @@ -637,6 +641,7 @@ def test_in_and_not_in(self): self.assertRaises(TypeError, lambda: 3 in 12) self.assertRaises(TypeError, lambda: 3 not in map) + self.assertRaises(ZeroDivisionError, lambda: 3 in BadIterableClass()) d = {"one": 1, "two": 2, "three": 3, 1j: 2j} for k in d: @@ -719,6 +724,7 @@ def test_indexOf(self): self.assertRaises(TypeError, indexOf, 42, 1) self.assertRaises(TypeError, indexOf, indexOf, indexOf) + self.assertRaises(ZeroDivisionError, indexOf, BadIterableClass(), 1) f = open(TESTFN, "w") try: @@ -1006,6 +1012,7 @@ def test_free_after_iterating(self): def test_error_iter(self): for typ in (DefaultIterClass, NoIterClass): self.assertRaises(TypeError, iter, typ()) + self.assertRaises(ZeroDivisionError, iter, BadIterableClass()) def test_main(): diff --git a/Lib/test/test_operator.py b/Lib/test/test_operator.py index f46d94a226717..29f5e4275c55e 100644 --- a/Lib/test/test_operator.py +++ b/Lib/test/test_operator.py @@ -35,6 +35,10 @@ def __mul__(self, other): def __rmul__(self, other): return other * self.lst +class BadIterable: + def __iter__(self): + raise ZeroDivisionError + class OperatorTestCase: def test_lt(self): @@ -142,6 +146,7 @@ def test_countOf(self): operator = self.module self.assertRaises(TypeError, operator.countOf) self.assertRaises(TypeError, operator.countOf, None, None) + self.assertRaises(ZeroDivisionError, operator.countOf, BadIterable(), 1) self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 3), 1) self.assertEqual(operator.countOf([1, 2, 1, 3, 1, 4], 5), 0) @@ -176,6 +181,7 @@ def test_indexOf(self): operator = self.module self.assertRaises(TypeError, operator.indexOf) self.assertRaises(TypeError, operator.indexOf, None, None) + self.assertRaises(ZeroDivisionError, operator.indexOf, BadIterable(), 1) self.assertEqual(operator.indexOf([4, 3, 2, 1], 3), 1) self.assertRaises(ValueError, operator.indexOf, [4, 3, 2, 1], 0) @@ -258,6 +264,7 @@ def test_contains(self): operator = self.module self.assertRaises(TypeError, operator.contains) self.assertRaises(TypeError, operator.contains, None, None) + self.assertRaises(ZeroDivisionError, operator.contains, BadIterable(), 1) self.assertTrue(operator.contains(range(4), 2)) self.assertFalse(operator.contains(range(4), 5)) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst new file mode 100644 index 0000000000000..73c593c04a0da --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst @@ -0,0 +1,4 @@ +Unexpected errors in calling the ``__iter__`` method are no longer masked by +``TypeError`` in the :keyword:`in` operator and functions +:func:`~operator.contains`, 
:func:`~operator.indexOf` and +:func:`~operator.countOf` of the :mod:`operator` module. diff --git a/Objects/abstract.c b/Objects/abstract.c index 4fabfd762f041..12237d570f743 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1999,7 +1999,9 @@ _PySequence_IterSearch(PyObject *seq, PyObject *obj, int operation) it = PyObject_GetIter(seq); if (it == NULL) { - type_error("argument of type '%.200s' is not iterable", seq); + if (PyErr_ExceptionMatches(PyExc_TypeError)) { + type_error("argument of type '%.200s' is not iterable", seq); + } return -1; } From webhook-mailer at python.org Mon Jun 22 04:22:08 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 22 Jun 2020 08:22:08 -0000 Subject: [Python-checkins] bpo-26407: Do not mask errors in csv. (GH-20536) Message-ID: https://github.com/python/cpython/commit/c88239f864a27f673c0f0a9e62d2488563f9d081 commit: c88239f864a27f673c0f0a9e62d2488563f9d081 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-22T11:21:59+03:00 summary: bpo-26407: Do not mask errors in csv. (GH-20536) Unexpected errors in calling the __iter__ method are no longer masked by TypeError in csv.reader(), csv.writer.writerow() and csv.writer.writerows(). files: A Misc/NEWS.d/next/Library/2020-05-30-14-19-47.bpo-26407.MjWLO1.rst M Lib/test/test_csv.py M Modules/_csv.c diff --git a/Lib/test/test_csv.py b/Lib/test/test_csv.py index a16d14019f341..d421be075ca27 100644 --- a/Lib/test/test_csv.py +++ b/Lib/test/test_csv.py @@ -14,6 +14,12 @@ from textwrap import dedent from collections import OrderedDict + +class BadIterable: + def __iter__(self): + raise OSError + + class Test_Csv(unittest.TestCase): """ Test the underlying C csv parser in ways that are not appropriate @@ -40,9 +46,15 @@ def _test_arg_valid(self, ctor, arg): def test_reader_arg_valid(self): self._test_arg_valid(csv.reader, []) + self.assertRaises(OSError, csv.reader, BadIterable()) def test_writer_arg_valid(self): self._test_arg_valid(csv.writer, StringIO()) + class BadWriter: + @property + def write(self): + raise OSError + self.assertRaises(OSError, csv.writer, BadWriter()) def _test_default_attrs(self, ctor, *args): obj = ctor(*args) @@ -141,6 +153,7 @@ def test_write_arg_valid(self): self._write_test([None], '""') self._write_error_test(csv.Error, [None], quoting = csv.QUOTE_NONE) # Check that exceptions are passed up the chain + self._write_error_test(OSError, BadIterable()) class BadList: def __len__(self): return 10; @@ -230,6 +243,12 @@ def test_writerows_with_none(self): fileobj.seek(0) self.assertEqual(fileobj.read(), 'a\r\n""\r\n') + def test_writerows_errors(self): + with TemporaryFile("w+", newline='') as fileobj: + writer = csv.writer(fileobj) + self.assertRaises(TypeError, writer.writerows, None) + self.assertRaises(OSError, writer.writerows, BadIterable()) + @support.cpython_only def test_writerows_legacy_strings(self): import _testcapi @@ -334,7 +353,6 @@ def test_read_linenum(self): def test_roundtrip_quoteed_newlines(self): with TemporaryFile("w+", newline='') as fileobj: writer = csv.writer(fileobj) - self.assertRaises(TypeError, writer.writerows, None) rows = [['a\nb','b'],['c','x\r\nd']] writer.writerows(rows) fileobj.seek(0) diff --git a/Misc/NEWS.d/next/Library/2020-05-30-14-19-47.bpo-26407.MjWLO1.rst b/Misc/NEWS.d/next/Library/2020-05-30-14-19-47.bpo-26407.MjWLO1.rst new file mode 100644 index 0000000000000..d0e45cf1b1f2f --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-05-30-14-19-47.bpo-26407.MjWLO1.rst @@ -0,0 +1,3 @@ +Unexpected errors in 
calling the ``__iter__`` method are no longer masked +by ``TypeError`` in :func:`csv.reader`, :func:`csv.writer.writerow` and +:meth:`csv.writer.writerows`. diff --git a/Modules/_csv.c b/Modules/_csv.c index 2d4247740eb29..da61db9377f94 100644 --- a/Modules/_csv.c +++ b/Modules/_csv.c @@ -956,8 +956,6 @@ csv_reader(PyObject *module, PyObject *args, PyObject *keyword_args) } self->input_iter = PyObject_GetIter(iterator); if (self->input_iter == NULL) { - PyErr_SetString(PyExc_TypeError, - "argument 1 must be an iterator"); Py_DECREF(self); return NULL; } @@ -1163,10 +1161,14 @@ csv_writerow(WriterObj *self, PyObject *seq) PyObject *iter, *field, *line, *result; iter = PyObject_GetIter(seq); - if (iter == NULL) - return PyErr_Format(_csvstate_global->error_obj, - "iterable expected, not %.200s", - Py_TYPE(seq)->tp_name); + if (iter == NULL) { + if (PyErr_ExceptionMatches(PyExc_TypeError)) { + PyErr_Format(_csvstate_global->error_obj, + "iterable expected, not %.200s", + Py_TYPE(seq)->tp_name); + } + return NULL; + } /* Join all fields in internal buffer. */ @@ -1256,8 +1258,6 @@ csv_writerows(WriterObj *self, PyObject *seqseq) row_iter = PyObject_GetIter(seqseq); if (row_iter == NULL) { - PyErr_SetString(PyExc_TypeError, - "writerows() argument must be iterable"); return NULL; } while ((row_obj = PyIter_Next(row_iter))) { From webhook-mailer at python.org Mon Jun 22 04:24:21 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 22 Jun 2020 08:24:21 -0000 Subject: [Python-checkins] bpo-41068: Fix read after write in zipfile for non-ASCII files names. (GH-21040) Message-ID: https://github.com/python/cpython/commit/36ff513f82e372ed3cea0bf7cbdf15a1ef6dab9e commit: 36ff513f82e372ed3cea0bf7cbdf15a1ef6dab9e branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-22T11:24:11+03:00 summary: bpo-41068: Fix read after write in zipfile for non-ASCII files names. (GH-21040) files: A Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst M Lib/test/test_zipfile.py M Lib/zipfile.py diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index c9ca1ddaafe19..b7bc218d17a3d 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -1600,6 +1600,11 @@ def test_write_unicode_filenames(self): self.assertEqual(zf.filelist[0].filename, "foo.txt") self.assertEqual(zf.filelist[1].filename, "\xf6.txt") + def test_read_after_write_unicode_filenames(self): + with zipfile.ZipFile(TESTFN2, 'w') as zipfp: + zipfp.writestr('???????', b'sample') + self.assertEqual(zipfp.read('???????'), b'sample') + def test_exclusive_create_zip_file(self): """Test exclusive creating a new zipfile.""" unlink(TESTFN2) diff --git a/Lib/zipfile.py b/Lib/zipfile.py index 8903d6a42ee4e..915698f9e0588 100644 --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -1534,7 +1534,7 @@ def open(self, name, mode="r", pwd=None, *, force_zip64=False): # strong encryption raise NotImplementedError("strong encryption (flag bit 6)") - if zinfo.flag_bits & 0x800: + if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800: # UTF-8 filename fname_str = fname.decode("utf-8") else: diff --git a/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst b/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst new file mode 100644 index 0000000000000..20580c7886fac --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst @@ -0,0 +1,2 @@ +Fixed reading files with non-ASCII names from ZIP archive directly after +writing them. 
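[Editor's illustration, not part of the archived mail] A small sketch of the scenario fixed by bpo-41068 above; the in-memory buffer and member name are illustrative, and the comments paraphrase the one-line change to zipfile.py shown in the diff.

import io
import zipfile

buf = io.BytesIO()
with zipfile.ZipFile(buf, "w") as zf:
    # A non-ASCII member name forces the UTF-8 flag (0x800) in the local file header.
    zf.writestr("caf\u00e9.txt", b"sample")
    # Reading the member back through the still-open archive re-parses that
    # local header; the flag is now taken from the header bytes themselves
    # rather than from the in-memory ZipInfo, so the name round-trips.
    assert zf.read("caf\u00e9.txt") == b"sample"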
From webhook-mailer at python.org Mon Jun 22 04:40:19 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 22 Jun 2020 08:40:19 -0000 Subject: [Python-checkins] bpo-41068: Fix read after write in zipfile for non-ASCII files names. (GH-21040) Message-ID: https://github.com/python/cpython/commit/d7f37d1ed4fd38555e3e5aad32d515c96b528df5 commit: d7f37d1ed4fd38555e3e5aad32d515c96b528df5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-22T01:40:05-07:00 summary: bpo-41068: Fix read after write in zipfile for non-ASCII files names. (GH-21040) (cherry picked from commit 36ff513f82e372ed3cea0bf7cbdf15a1ef6dab9e) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst M Lib/test/test_zipfile.py M Lib/zipfile.py diff --git a/Lib/test/test_zipfile.py b/Lib/test/test_zipfile.py index 28e62dc5c61c5..e2e37a1770418 100644 --- a/Lib/test/test_zipfile.py +++ b/Lib/test/test_zipfile.py @@ -1589,6 +1589,11 @@ def test_write_unicode_filenames(self): self.assertEqual(zf.filelist[0].filename, "foo.txt") self.assertEqual(zf.filelist[1].filename, "\xf6.txt") + def test_read_after_write_unicode_filenames(self): + with zipfile.ZipFile(TESTFN2, 'w') as zipfp: + zipfp.writestr('???????', b'sample') + self.assertEqual(zipfp.read('???????'), b'sample') + def test_exclusive_create_zip_file(self): """Test exclusive creating a new zipfile.""" unlink(TESTFN2) diff --git a/Lib/zipfile.py b/Lib/zipfile.py index f7a2a2e8b8ab9..73e89666309ff 100644 --- a/Lib/zipfile.py +++ b/Lib/zipfile.py @@ -1546,7 +1546,7 @@ def open(self, name, mode="r", pwd=None, *, force_zip64=False): # strong encryption raise NotImplementedError("strong encryption (flag bit 6)") - if zinfo.flag_bits & 0x800: + if fheader[_FH_GENERAL_PURPOSE_FLAG_BITS] & 0x800: # UTF-8 filename fname_str = fname.decode("utf-8") else: diff --git a/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst b/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst new file mode 100644 index 0000000000000..20580c7886fac --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst @@ -0,0 +1,2 @@ +Fixed reading files with non-ASCII names from ZIP archive directly after +writing them. 
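[Editor's illustration, not part of the archived mail] Circling back to the csv change earlier in this digest (bpo-26407), a minimal sketch of the unmasked errors; BadIterable and its message mirror the new test and are illustrative only.

import csv
import io

class BadIterable:
    # __iter__ raises something other than TypeError.
    def __iter__(self):
        raise OSError("row source disappeared")

writer = csv.writer(io.StringIO())
try:
    writer.writerow(BadIterable())
except OSError as exc:
    # Previously the real failure was replaced by
    # _csv.Error("iterable expected, not BadIterable"); now it propagates.
    print("original error preserved:", exc)

# csv.reader() behaves the same way when the passed object's __iter__ fails.
try:
    csv.reader(BadIterable())
except OSError as exc:
    print("original error preserved:", exc)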
From webhook-mailer at python.org Mon Jun 22 05:06:15 2020 From: webhook-mailer at python.org (Krishna Chivukula) Date: Mon, 22 Jun 2020 09:06:15 -0000 Subject: [Python-checkins] bpo-41005: Fixed perrmission error (GH-20936) Message-ID: https://github.com/python/cpython/commit/9e27bc0c1efc7478872f98729f87886e9333548f commit: 9e27bc0c1efc7478872f98729f87886e9333548f branch: master author: Krishna Chivukula <63070026+KrishnaSai2020 at users.noreply.github.com> committer: GitHub date: 2020-06-22T11:06:07+02:00 summary: bpo-41005: Fixed perrmission error (GH-20936) * fixed issue 41005: webbrowser fails when xdg-settings cannot be executed Co-authored-by: KrishnaSai2020 Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> files: A Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 31e1df4247946..cea91308ce1b3 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -550,7 +550,7 @@ def register_standard_browsers(): cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) result = raw_result.decode().strip() - except (FileNotFoundError, subprocess.CalledProcessError): + except (FileNotFoundError, subprocess.CalledProcessError, PermissionError) : pass else: global _os_preferred_browser diff --git a/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst b/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst new file mode 100644 index 0000000000000..3b5f3f23a12f5 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst @@ -0,0 +1 @@ +fixed an XDG settings issue not allowing macos to open browser in webbrowser.py \ No newline at end of file From webhook-mailer at python.org Mon Jun 22 11:27:45 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 22 Jun 2020 15:27:45 -0000 Subject: [Python-checkins] bpo-41078: Rename pycore_tupleobject.h to pycore_tuple.h (GH-21056) Message-ID: https://github.com/python/cpython/commit/384621c42f9102e31ba2c47feba144af09c989e5 commit: 384621c42f9102e31ba2c47feba144af09c989e5 branch: master author: Victor Stinner committer: GitHub date: 2020-06-22T17:27:35+02:00 summary: bpo-41078: Rename pycore_tupleobject.h to pycore_tuple.h (GH-21056) files: A Include/internal/pycore_tuple.h D Include/internal/pycore_tupleobject.h M Makefile.pre.in M Modules/_functoolsmodule.c M Modules/itertoolsmodule.c M Objects/call.c M Objects/codeobject.c M Objects/descrobject.c M Objects/funcobject.c M Objects/listobject.c M Objects/rangeobject.c M Objects/structseq.c M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M Python/bltinmodule.c M Python/ceval.c M Python/getargs.c M Python/sysmodule.c diff --git a/Include/internal/pycore_tupleobject.h b/Include/internal/pycore_tuple.h similarity index 75% rename from Include/internal/pycore_tupleobject.h rename to Include/internal/pycore_tuple.h index f95f16c0ed02f..5353e18d08327 100644 --- a/Include/internal/pycore_tupleobject.h +++ b/Include/internal/pycore_tuple.h @@ -1,5 +1,5 @@ -#ifndef Py_INTERNAL_TUPLEOBJECT_H -#define Py_INTERNAL_TUPLEOBJECT_H +#ifndef Py_INTERNAL_TUPLE_H +#define Py_INTERNAL_TUPLE_H #ifdef __cplusplus extern "C" { #endif @@ -11,9 +11,10 @@ extern "C" { #include "tupleobject.h" /* _PyTuple_CAST() */ #define _PyTuple_ITEMS(op) (_PyTuple_CAST(op)->ob_item) + PyAPI_FUNC(PyObject *) _PyTuple_FromArray(PyObject *const *, Py_ssize_t); #ifdef __cplusplus } #endif -#endif /* 
!Py_INTERNAL_TUPLEOBJECT_H */ +#endif /* !Py_INTERNAL_TUPLE_H */ diff --git a/Makefile.pre.in b/Makefile.pre.in index 24dddcf56f8fa..a52a97f7969a7 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1120,7 +1120,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_runtime.h \ $(srcdir)/Include/internal/pycore_sysmodule.h \ $(srcdir)/Include/internal/pycore_traceback.h \ - $(srcdir)/Include/internal/pycore_tupleobject.h \ + $(srcdir)/Include/internal/pycore_tuple.h \ $(srcdir)/Include/internal/pycore_warnings.h \ $(DTRACE_HEADERS) diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index f1ee23f294fa3..8120140afac05 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -1,6 +1,6 @@ #include "Python.h" #include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_tupleobject.h" +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include "structmember.h" // PyMemberDef /* _functools module written and maintained diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c index 3f2f7165b171b..3809dc3843c14 100644 --- a/Modules/itertoolsmodule.c +++ b/Modules/itertoolsmodule.c @@ -1,7 +1,7 @@ #define PY_SSIZE_T_CLEAN #include "Python.h" -#include "pycore_tupleobject.h" +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include // offsetof() /* Itertools module written and maintained diff --git a/Objects/call.c b/Objects/call.c index 61426c7e09e4e..30fa14ccfd765 100644 --- a/Objects/call.c +++ b/Objects/call.c @@ -1,11 +1,11 @@ #include "Python.h" -#include "pycore_call.h" -#include "pycore_ceval.h" // _PyEval_EvalFrame() -#include "pycore_object.h" -#include "pycore_pyerrors.h" -#include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_tupleobject.h" -#include "frameobject.h" +#include "pycore_call.h" // _PyObject_CallNoArgTstate() +#include "pycore_ceval.h" // _PyEval_EvalFrame() +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_pyerrors.h" // _PyErr_Occurred() +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_tuple.h" // _PyTuple_ITEMS() +#include "frameobject.h" // _PyFrame_New_NoTrack() static PyObject *const * diff --git a/Objects/codeobject.c b/Objects/codeobject.c index cb4fb68124333..49011db1014e7 100644 --- a/Objects/codeobject.c +++ b/Objects/codeobject.c @@ -4,10 +4,10 @@ #include "code.h" #include "opcode.h" #include "structmember.h" // PyMemberDef -#include "pycore_code.h" +#include "pycore_code.h" // _PyOpcache #include "pycore_interp.h" // PyInterpreterState.co_extra_freefuncs #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_tupleobject.h" +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include "clinic/codeobject.c.h" /* Holder for co_extra information */ diff --git a/Objects/descrobject.c b/Objects/descrobject.c index fce9cdd309077..a8ce13c7aa4ba 100644 --- a/Objects/descrobject.c +++ b/Objects/descrobject.c @@ -1,10 +1,10 @@ /* Descriptors -- a new, flexible way to describe attributes */ #include "Python.h" -#include "pycore_ceval.h" // _Py_EnterRecursiveCall() -#include "pycore_object.h" -#include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_tupleobject.h" +#include "pycore_ceval.h" // _Py_EnterRecursiveCall() +#include "pycore_object.h" // _PyObject_GC_UNTRACK() +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include "structmember.h" // PyMemberDef _Py_IDENTIFIER(getattr); diff --git a/Objects/funcobject.c b/Objects/funcobject.c index bd24f67b9740a..09a188664e861 100644 --- 
a/Objects/funcobject.c +++ b/Objects/funcobject.c @@ -3,7 +3,6 @@ #include "Python.h" #include "pycore_object.h" -#include "pycore_tupleobject.h" #include "code.h" #include "structmember.h" // PyMemberDef diff --git a/Objects/listobject.c b/Objects/listobject.c index 22cdbe3cfdd41..261a0fdfffae0 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -1,10 +1,10 @@ /* List object implementation */ #include "Python.h" -#include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_object.h" -#include "pycore_tupleobject.h" -#include "pycore_accu.h" +#include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_interp.h" // PyInterpreterState.list +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_tuple.h" // _PyTuple_FromArray() #ifdef STDC_HEADERS #include diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c index 751dbb9815d82..ba6d425717495 100644 --- a/Objects/rangeobject.c +++ b/Objects/rangeobject.c @@ -1,8 +1,8 @@ /* Range object implementation */ #include "Python.h" -#include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_tupleobject.h" +#include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include "structmember.h" // PyMemberDef /* Support objects whose length is > PY_SSIZE_T_MAX. diff --git a/Objects/structseq.c b/Objects/structseq.c index b17b1f99a5bc6..bd20ce3fbdcb9 100644 --- a/Objects/structseq.c +++ b/Objects/structseq.c @@ -8,8 +8,8 @@ */ #include "Python.h" -#include "pycore_tupleobject.h" -#include "pycore_object.h" +#include "pycore_tuple.h" // _PyTuple_FromArray() +#include "pycore_object.h" // _PyObject_GC_TRACK() #include "structmember.h" // PyMemberDef static const char visible_length_key[] = "n_sequence_fields"; diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index fc99d7748a01f..00714757f64f2 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -191,7 +191,7 @@ - + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 12f05acc3a74a..ddcdaf471901c 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -273,7 +273,7 @@ Include - + Include diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index c6ede1cd7f6d6..a582ccda2c384 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -4,10 +4,9 @@ #include #include "ast.h" #undef Yield /* undefine macro conflicting with */ -#include "pycore_object.h" -#include "pycore_pyerrors.h" -#include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_tupleobject.h" +#include "pycore_pyerrors.h" // _PyErr_NoMemory() +#include "pycore_pystate.h" // _PyThreadState_GET() +#include "pycore_tuple.h" // _PyTuple_FromArray() _Py_IDENTIFIER(__builtins__); _Py_IDENTIFIER(__dict__); diff --git a/Python/ceval.c b/Python/ceval.c index 2bd7cb3ef5313..0386929a5b2b3 100644 --- a/Python/ceval.c +++ b/Python/ceval.c @@ -11,17 +11,17 @@ #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_call.h" -#include "pycore_ceval.h" -#include "pycore_code.h" -#include "pycore_initconfig.h" -#include "pycore_object.h" -#include "pycore_pyerrors.h" -#include "pycore_pylifecycle.h" +#include "pycore_call.h" // _PyObject_FastCallDictTstate() +#include "pycore_ceval.h" // _PyEval_SignalAsyncExc() +#include "pycore_code.h" // _PyCode_InitOpcache() +#include "pycore_initconfig.h" // _PyStatus_OK() +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_pyerrors.h" // _PyErr_Fetch() 
+#include "pycore_pylifecycle.h" // _PyErr_Print() #include "pycore_pymem.h" // _PyMem_IsPtrFreed() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "pycore_sysmodule.h" -#include "pycore_tupleobject.h" +#include "pycore_sysmodule.h" // _PySys_Audit() +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include "code.h" #include "dictobject.h" diff --git a/Python/getargs.c b/Python/getargs.c index aaf687a46b7f6..c85ff6d4777d2 100644 --- a/Python/getargs.c +++ b/Python/getargs.c @@ -2,7 +2,7 @@ /* New getargs implementation */ #include "Python.h" -#include "pycore_tupleobject.h" +#include "pycore_tuple.h" // _PyTuple_ITEMS() #include #include diff --git a/Python/sysmodule.c b/Python/sysmodule.c index 3e4115fe8e1f9..f3b5a6afdf1e5 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -15,18 +15,18 @@ Data members: */ #include "Python.h" -#include "code.h" -#include "frameobject.h" // PyFrame_GetBack() #include "pycore_ceval.h" // _Py_RecursionLimitLowerWaterMark() -#include "pycore_initconfig.h" -#include "pycore_object.h" -#include "pycore_pathconfig.h" -#include "pycore_pyerrors.h" -#include "pycore_pylifecycle.h" +#include "pycore_initconfig.h" // _PyStatus_EXCEPTION() +#include "pycore_object.h" // _PyObject_IS_GC() +#include "pycore_pathconfig.h" // _PyPathConfig_ComputeSysPath0() +#include "pycore_pyerrors.h" // _PyErr_Fetch() +#include "pycore_pylifecycle.h" // _PyErr_WriteUnraisableDefaultHook() #include "pycore_pymem.h" // _PyMem_SetDefaultAllocator() #include "pycore_pystate.h" // _PyThreadState_GET() -#include "pycore_tupleobject.h" +#include "pycore_tuple.h" // _PyTuple_FromArray() +#include "code.h" +#include "frameobject.h" // PyFrame_GetBack() #include "pydtrace.h" #include "osdefs.h" // DELIM #include From webhook-mailer at python.org Mon Jun 22 11:39:41 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 22 Jun 2020 15:39:41 -0000 Subject: [Python-checkins] bpo-41078: Add pycore_list.h internal header file (GH-21057) Message-ID: https://github.com/python/cpython/commit/c45dbe93b7094fe014442c198727ee38b25541c4 commit: c45dbe93b7094fe014442c198727ee38b25541c4 branch: master author: Victor Stinner committer: GitHub date: 2020-06-22T17:39:32+02:00 summary: bpo-41078: Add pycore_list.h internal header file (GH-21057) * Move _PyList_ITEMS() to pycore_list.h. * The C extension "_heapq" is now built with Py_BUILD_CORE_MODULE macro defined to access the internal C API. 
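For readers unfamiliar with the internal C API split, here is a minimal sketch of the pattern this change enables. It assumes a hypothetical extension source file compiled with -DPy_BUILD_CORE_MODULE (the same flag the Modules/Setup and setup.py hunks below add for _heapq); the helper function and its name are illustrative and not part of this commit.

    /* sketch.c -- hypothetical module built with -DPy_BUILD_CORE_MODULE,
       which lets Python.h expose the internal pycore_*.h headers. */
    #include "Python.h"
    #include "pycore_list.h"        // _PyList_ITEMS()

    /* Count how many items of a list are None, reading the ob_item array
       directly instead of calling PyList_GET_ITEM() per index. */
    static Py_ssize_t
    count_none(PyObject *list)
    {
        PyObject **items = _PyList_ITEMS(list);   /* borrowed references */
        Py_ssize_t n = 0;
        for (Py_ssize_t i = 0; i < PyList_GET_SIZE(list); i++) {
            if (items[i] == Py_None) {
                n++;
            }
        }
        return n;
    }

Third-party extensions should generally not rely on this internal API; the macro move only affects modules that are built as part of CPython itself.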
files: A Include/internal/pycore_list.h M Include/cpython/listobject.h M Makefile.pre.in M Modules/Setup M Modules/_heapqmodule.c M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M setup.py diff --git a/Include/cpython/listobject.h b/Include/cpython/listobject.h index b1af5f6764427..70b9d83d8a232 100644 --- a/Include/cpython/listobject.h +++ b/Include/cpython/listobject.h @@ -32,4 +32,3 @@ PyAPI_FUNC(void) _PyList_DebugMallocStats(FILE *out); #define PyList_GET_ITEM(op, i) (_PyList_CAST(op)->ob_item[i]) #define PyList_SET_ITEM(op, i, v) (_PyList_CAST(op)->ob_item[i] = (v)) #define PyList_GET_SIZE(op) Py_SIZE(_PyList_CAST(op)) -#define _PyList_ITEMS(op) (_PyList_CAST(op)->ob_item) diff --git a/Include/internal/pycore_list.h b/Include/internal/pycore_list.h new file mode 100644 index 0000000000000..f18fb052c49c7 --- /dev/null +++ b/Include/internal/pycore_list.h @@ -0,0 +1,20 @@ +#ifndef Py_INTERNAL_LIST_H +#define Py_INTERNAL_LIST_H +#ifdef __cplusplus +extern "C" { +#endif + +#ifndef Py_BUILD_CORE +# error "this header requires Py_BUILD_CORE define" +#endif + +#include "listobject.h" // _PyList_CAST() + + +#define _PyList_ITEMS(op) (_PyList_CAST(op)->ob_item) + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_INTERNAL_LIST_H */ diff --git a/Makefile.pre.in b/Makefile.pre.in index a52a97f7969a7..3428b9842a5a0 100644 --- a/Makefile.pre.in +++ b/Makefile.pre.in @@ -1110,6 +1110,7 @@ PYTHON_HEADERS= \ $(srcdir)/Include/internal/pycore_import.h \ $(srcdir)/Include/internal/pycore_initconfig.h \ $(srcdir)/Include/internal/pycore_interp.h \ + $(srcdir)/Include/internal/pycore_list.h \ $(srcdir)/Include/internal/pycore_object.h \ $(srcdir)/Include/internal/pycore_pathconfig.h \ $(srcdir)/Include/internal/pycore_pyerrors.h \ diff --git a/Modules/Setup b/Modules/Setup index 5d428d5b8baa7..470bf6bc2efbf 100644 --- a/Modules/Setup +++ b/Modules/Setup @@ -180,7 +180,7 @@ _symtable symtablemodule.c #_datetime _datetimemodule.c # datetime accelerator #_zoneinfo _zoneinfo.c # zoneinfo accelerator #_bisect _bisectmodule.c # Bisection algorithms -#_heapq _heapqmodule.c # Heap queue algorithm +#_heapq _heapqmodule.c -DPy_BUILD_CORE_MODULE # Heap queue algorithm #_asyncio _asynciomodule.c # Fast asyncio Future #_json -I$(srcdir)/Include/internal -DPy_BUILD_CORE_BUILTIN _json.c # _json speedups #_statistics _statisticsmodule.c # statistics accelerator diff --git a/Modules/_heapqmodule.c b/Modules/_heapqmodule.c index 193478d79b456..20468c28f2423 100644 --- a/Modules/_heapqmodule.c +++ b/Modules/_heapqmodule.c @@ -7,9 +7,11 @@ annotated by Fran?ois Pinard, and converted to C by Raymond Hettinger. 
*/ #include "Python.h" +#include "pycore_list.h" // _PyList_ITEMS() #include "clinic/_heapqmodule.c.h" + /*[clinic input] module _heapq [clinic start generated code]*/ diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 00714757f64f2..54e23c2c2b505 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -165,8 +165,8 @@ - + @@ -181,6 +181,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index ddcdaf471901c..4ba0bc2ac3bfa 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -240,6 +240,9 @@ Include + + Include + Include diff --git a/setup.py b/setup.py index 648e4e6a8932e..21a5a58981fc1 100644 --- a/setup.py +++ b/setup.py @@ -863,7 +863,8 @@ def detect_simple_extensions(self): # bisect self.add(Extension("_bisect", ["_bisectmodule.c"])) # heapq - self.add(Extension("_heapq", ["_heapqmodule.c"])) + self.add(Extension("_heapq", ["_heapqmodule.c"], + extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) # C-optimized pickle replacement self.add(Extension("_pickle", ["_pickle.c"], extra_compile_args=['-DPy_BUILD_CORE_MODULE'])) From webhook-mailer at python.org Mon Jun 22 11:53:16 2020 From: webhook-mailer at python.org (Dong-hee Na) Date: Mon, 22 Jun 2020 15:53:16 -0000 Subject: [Python-checkins] bpo-1635741: Port _lzma module to multiphase initialization (GH-19382) Message-ID: https://github.com/python/cpython/commit/1937edd376274cb26090d71253191502a9de32d6 commit: 1937edd376274cb26090d71253191502a9de32d6 branch: master author: Dong-hee Na committer: GitHub date: 2020-06-23T00:53:07+09:00 summary: bpo-1635741: Port _lzma module to multiphase initialization (GH-19382) files: A Misc/NEWS.d/next/Core and Builtins/2020-04-05-02-35-08.bpo-1635741.Kfe9fT.rst M Modules/_lzmamodule.c M Modules/clinic/_lzmamodule.c.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-04-05-02-35-08.bpo-1635741.Kfe9fT.rst b/Misc/NEWS.d/next/Core and Builtins/2020-04-05-02-35-08.bpo-1635741.Kfe9fT.rst new file mode 100644 index 0000000000000..956d0b68a8dfb --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-04-05-02-35-08.bpo-1635741.Kfe9fT.rst @@ -0,0 +1 @@ +Port :mod:`_lzma` to multiphase initialization. diff --git a/Modules/_lzmamodule.c b/Modules/_lzmamodule.c index 2a62a68356850..24e1d6c2884ce 100644 --- a/Modules/_lzmamodule.c +++ b/Modules/_lzmamodule.c @@ -23,6 +23,20 @@ } } while (0) #define RELEASE_LOCK(obj) PyThread_release_lock((obj)->lock) +typedef struct { + PyTypeObject *lzma_compressor_type; + PyTypeObject *lzma_decompressor_type; + PyObject *error; + PyObject *empty_tuple; +} _lzma_state; + +static inline _lzma_state* +get_lzma_state(PyObject *module) +{ + void *state = PyModule_GetState(module); + assert(state != NULL); + return (_lzma_state *)state; +} /* Container formats: */ enum { @@ -56,17 +70,10 @@ typedef struct { PyThread_type_lock lock; } Decompressor; -/* LZMAError class object. */ -static PyObject *Error; - -/* An empty tuple, used by the filter specifier parsing code. */ -static PyObject *empty_tuple; - - /* Helper functions. 
*/ static int -catch_lzma_error(lzma_ret lzret) +catch_lzma_error(_lzma_state *state, lzma_ret lzret) { switch (lzret) { case LZMA_OK: @@ -75,31 +82,31 @@ catch_lzma_error(lzma_ret lzret) case LZMA_STREAM_END: return 0; case LZMA_UNSUPPORTED_CHECK: - PyErr_SetString(Error, "Unsupported integrity check"); + PyErr_SetString(state->error, "Unsupported integrity check"); return 1; case LZMA_MEM_ERROR: PyErr_NoMemory(); return 1; case LZMA_MEMLIMIT_ERROR: - PyErr_SetString(Error, "Memory usage limit exceeded"); + PyErr_SetString(state->error, "Memory usage limit exceeded"); return 1; case LZMA_FORMAT_ERROR: - PyErr_SetString(Error, "Input format not supported by decoder"); + PyErr_SetString(state->error, "Input format not supported by decoder"); return 1; case LZMA_OPTIONS_ERROR: - PyErr_SetString(Error, "Invalid or unsupported options"); + PyErr_SetString(state->error, "Invalid or unsupported options"); return 1; case LZMA_DATA_ERROR: - PyErr_SetString(Error, "Corrupt input data"); + PyErr_SetString(state->error, "Corrupt input data"); return 1; case LZMA_BUF_ERROR: - PyErr_SetString(Error, "Insufficient buffer space"); + PyErr_SetString(state->error, "Insufficient buffer space"); return 1; case LZMA_PROG_ERROR: - PyErr_SetString(Error, "Internal error"); + PyErr_SetString(state->error, "Internal error"); return 1; default: - PyErr_Format(Error, "Unrecognized error from liblzma: %d", lzret); + PyErr_Format(state->error, "Unrecognized error from liblzma: %d", lzret); return 1; } } @@ -107,8 +114,9 @@ catch_lzma_error(lzma_ret lzret) static void* PyLzma_Malloc(void *opaque, size_t items, size_t size) { - if (size != 0 && items > (size_t)PY_SSIZE_T_MAX / size) + if (size != 0 && items > (size_t)PY_SSIZE_T_MAX / size) { return NULL; + } /* PyMem_Malloc() cannot be used: the GIL is not held when lzma_code() is called */ return PyMem_RawMalloc(items * size); @@ -132,8 +140,9 @@ grow_buffer(PyObject **buf, Py_ssize_t max_length) Py_ssize_t size = PyBytes_GET_SIZE(*buf); Py_ssize_t newsize = size + (size >> 3) + 6; - if (max_length > 0 && newsize > max_length) + if (max_length > 0 && newsize > max_length) { newsize = max_length; + } return _PyBytes_Resize(buf, newsize); } @@ -186,7 +195,7 @@ INT_TYPE_CONVERTER_FUNC(lzma_match_finder, lzma_mf_converter) the C lzma_filter structs expected by liblzma. 
*/ static void * -parse_filter_spec_lzma(PyObject *spec) +parse_filter_spec_lzma(_lzma_state *state, PyObject *spec) { static char *optnames[] = {"id", "preset", "dict_size", "lc", "lp", "pb", "mode", "nice_len", "mf", "depth", NULL}; @@ -200,28 +209,32 @@ parse_filter_spec_lzma(PyObject *spec) preset_obj = PyMapping_GetItemString(spec, "preset"); if (preset_obj == NULL) { - if (PyErr_ExceptionMatches(PyExc_KeyError)) + if (PyErr_ExceptionMatches(PyExc_KeyError)) { PyErr_Clear(); - else + } + else { return NULL; + } } else { int ok = uint32_converter(preset_obj, &preset); Py_DECREF(preset_obj); - if (!ok) + if (!ok) { return NULL; + } } options = (lzma_options_lzma *)PyMem_Calloc(1, sizeof *options); - if (options == NULL) + if (options == NULL) { return PyErr_NoMemory(); + } if (lzma_lzma_preset(options, preset)) { PyMem_Free(options); - PyErr_Format(Error, "Invalid compression preset: %u", preset); + PyErr_Format(state->error, "Invalid compression preset: %u", preset); return NULL; } - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, + if (!PyArg_ParseTupleAndKeywords(state->empty_tuple, spec, "|OOO&O&O&O&O&O&O&O&", optnames, &id, &preset_obj, uint32_converter, &options->dict_size, @@ -235,20 +248,21 @@ parse_filter_spec_lzma(PyObject *spec) PyErr_SetString(PyExc_ValueError, "Invalid filter specifier for LZMA filter"); PyMem_Free(options); - options = NULL; + return NULL; } + return options; } static void * -parse_filter_spec_delta(PyObject *spec) +parse_filter_spec_delta(_lzma_state *state, PyObject *spec) { static char *optnames[] = {"id", "dist", NULL}; PyObject *id; uint32_t dist = 1; lzma_options_delta *options; - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, "|OO&", optnames, + if (!PyArg_ParseTupleAndKeywords(state->empty_tuple, spec, "|OO&", optnames, &id, uint32_converter, &dist)) { PyErr_SetString(PyExc_ValueError, "Invalid filter specifier for delta filter"); @@ -256,22 +270,23 @@ parse_filter_spec_delta(PyObject *spec) } options = (lzma_options_delta *)PyMem_Calloc(1, sizeof *options); - if (options == NULL) + if (options == NULL) { return PyErr_NoMemory(); + } options->type = LZMA_DELTA_TYPE_BYTE; options->dist = dist; return options; } static void * -parse_filter_spec_bcj(PyObject *spec) +parse_filter_spec_bcj(_lzma_state *state, PyObject *spec) { static char *optnames[] = {"id", "start_offset", NULL}; PyObject *id; uint32_t start_offset = 0; lzma_options_bcj *options; - if (!PyArg_ParseTupleAndKeywords(empty_tuple, spec, "|OO&", optnames, + if (!PyArg_ParseTupleAndKeywords(state->empty_tuple, spec, "|OO&", optnames, &id, uint32_converter, &start_offset)) { PyErr_SetString(PyExc_ValueError, "Invalid filter specifier for BCJ filter"); @@ -279,14 +294,15 @@ parse_filter_spec_bcj(PyObject *spec) } options = (lzma_options_bcj *)PyMem_Calloc(1, sizeof *options); - if (options == NULL) + if (options == NULL) { return PyErr_NoMemory(); + } options->start_offset = start_offset; return options; } static int -lzma_filter_converter(PyObject *spec, void *ptr) +lzma_filter_converter(_lzma_state *state, PyObject *spec, void *ptr) { lzma_filter *f = (lzma_filter *)ptr; PyObject *id_obj; @@ -305,16 +321,17 @@ lzma_filter_converter(PyObject *spec, void *ptr) } f->id = PyLong_AsUnsignedLongLong(id_obj); Py_DECREF(id_obj); - if (PyErr_Occurred()) + if (PyErr_Occurred()) { return 0; + } switch (f->id) { case LZMA_FILTER_LZMA1: case LZMA_FILTER_LZMA2: - f->options = parse_filter_spec_lzma(spec); + f->options = parse_filter_spec_lzma(state, spec); return f->options != NULL; case 
LZMA_FILTER_DELTA: - f->options = parse_filter_spec_delta(spec); + f->options = parse_filter_spec_delta(state, spec); return f->options != NULL; case LZMA_FILTER_X86: case LZMA_FILTER_POWERPC: @@ -322,7 +339,7 @@ lzma_filter_converter(PyObject *spec, void *ptr) case LZMA_FILTER_ARM: case LZMA_FILTER_ARMTHUMB: case LZMA_FILTER_SPARC: - f->options = parse_filter_spec_bcj(spec); + f->options = parse_filter_spec_bcj(state, spec); return f->options != NULL; default: PyErr_Format(PyExc_ValueError, "Invalid filter ID: %llu", f->id); @@ -333,20 +350,20 @@ lzma_filter_converter(PyObject *spec, void *ptr) static void free_filter_chain(lzma_filter filters[]) { - int i; - - for (i = 0; filters[i].id != LZMA_VLI_UNKNOWN; i++) + for (int i = 0; filters[i].id != LZMA_VLI_UNKNOWN; i++) { PyMem_Free(filters[i].options); + } } static int -parse_filter_chain_spec(lzma_filter filters[], PyObject *filterspecs) +parse_filter_chain_spec(_lzma_state *state, lzma_filter filters[], PyObject *filterspecs) { Py_ssize_t i, num_filters; num_filters = PySequence_Length(filterspecs); - if (num_filters == -1) + if (num_filters == -1) { return -1; + } if (num_filters > LZMA_FILTERS_MAX) { PyErr_Format(PyExc_ValueError, "Too many filters - liblzma supports a maximum of %d", @@ -357,8 +374,9 @@ parse_filter_chain_spec(lzma_filter filters[], PyObject *filterspecs) for (i = 0; i < num_filters; i++) { int ok = 1; PyObject *spec = PySequence_GetItem(filterspecs, i); - if (spec == NULL || !lzma_filter_converter(spec, &filters[i])) + if (spec == NULL || !lzma_filter_converter(state, spec, &filters[i])) { ok = 0; + } Py_XDECREF(spec); if (!ok) { filters[i].id = LZMA_VLI_UNKNOWN; @@ -383,8 +401,9 @@ spec_add_field(PyObject *spec, _Py_Identifier *key, unsigned long long value) PyObject *value_object; value_object = PyLong_FromUnsignedLongLong(value); - if (value_object == NULL) + if (value_object == NULL) { return -1; + } status = _PyDict_SetItemId(spec, key, value_object); Py_DECREF(value_object); @@ -397,8 +416,9 @@ build_filter_spec(const lzma_filter *f) PyObject *spec; spec = PyDict_New(); - if (spec == NULL) + if (spec == NULL) { return NULL; + } #define ADD_FIELD(SOURCE, FIELD) \ do { \ @@ -492,10 +512,13 @@ compress(Compressor *c, uint8_t *data, size_t len, lzma_action action) { Py_ssize_t data_size = 0; PyObject *result; + _lzma_state *state = PyType_GetModuleState(Py_TYPE(c)); + assert(state != NULL); result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE); - if (result == NULL) + if (result == NULL) { return NULL; + } c->lzs.next_in = data; c->lzs.avail_in = len; c->lzs.next_out = (uint8_t *)PyBytes_AS_STRING(result); @@ -506,11 +529,13 @@ compress(Compressor *c, uint8_t *data, size_t len, lzma_action action) Py_BEGIN_ALLOW_THREADS lzret = lzma_code(&c->lzs, action); data_size = (char *)c->lzs.next_out - PyBytes_AS_STRING(result); - if (lzret == LZMA_BUF_ERROR && len == 0 && c->lzs.avail_out > 0) + if (lzret == LZMA_BUF_ERROR && len == 0 && c->lzs.avail_out > 0) { lzret = LZMA_OK; /* That wasn't a real error */ + } Py_END_ALLOW_THREADS - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { goto error; + } if ((action == LZMA_RUN && c->lzs.avail_in == 0) || (action == LZMA_FINISH && lzret == LZMA_STREAM_END)) { break; @@ -522,8 +547,9 @@ compress(Compressor *c, uint8_t *data, size_t len, lzma_action action) } } if (data_size != PyBytes_GET_SIZE(result)) - if (_PyBytes_Resize(&result, data_size) == -1) + if (_PyBytes_Resize(&result, data_size) == -1) { goto error; + } return result; error: @@ -552,10 
+578,12 @@ _lzma_LZMACompressor_compress_impl(Compressor *self, Py_buffer *data) PyObject *result = NULL; ACQUIRE_LOCK(self); - if (self->flushed) + if (self->flushed) { PyErr_SetString(PyExc_ValueError, "Compressor has been flushed"); - else + } + else { result = compress(self, data->buf, data->len, LZMA_RUN); + } RELEASE_LOCK(self); return result; } @@ -588,8 +616,8 @@ _lzma_LZMACompressor_flush_impl(Compressor *self) } static int -Compressor_init_xz(lzma_stream *lzs, int check, uint32_t preset, - PyObject *filterspecs) +Compressor_init_xz(_lzma_state *state, lzma_stream *lzs, + int check, uint32_t preset, PyObject *filterspecs) { lzma_ret lzret; @@ -598,19 +626,21 @@ Compressor_init_xz(lzma_stream *lzs, int check, uint32_t preset, } else { lzma_filter filters[LZMA_FILTERS_MAX + 1]; - if (parse_filter_chain_spec(filters, filterspecs) == -1) + if (parse_filter_chain_spec(state, filters, filterspecs) == -1) return -1; lzret = lzma_stream_encoder(lzs, filters, check); free_filter_chain(filters); } - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { return -1; - else + } + else { return 0; + } } static int -Compressor_init_alone(lzma_stream *lzs, uint32_t preset, PyObject *filterspecs) +Compressor_init_alone(_lzma_state *state, lzma_stream *lzs, uint32_t preset, PyObject *filterspecs) { lzma_ret lzret; @@ -618,14 +648,14 @@ Compressor_init_alone(lzma_stream *lzs, uint32_t preset, PyObject *filterspecs) lzma_options_lzma options; if (lzma_lzma_preset(&options, preset)) { - PyErr_Format(Error, "Invalid compression preset: %u", preset); + PyErr_Format(state->error, "Invalid compression preset: %u", preset); return -1; } lzret = lzma_alone_encoder(lzs, &options); } else { lzma_filter filters[LZMA_FILTERS_MAX + 1]; - if (parse_filter_chain_spec(filters, filterspecs) == -1) + if (parse_filter_chain_spec(state, filters, filterspecs) == -1) return -1; if (filters[0].id == LZMA_FILTER_LZMA1 && filters[1].id == LZMA_VLI_UNKNOWN) { @@ -638,14 +668,16 @@ Compressor_init_alone(lzma_stream *lzs, uint32_t preset, PyObject *filterspecs) } free_filter_chain(filters); } - if (PyErr_Occurred() || catch_lzma_error(lzret)) + if (PyErr_Occurred() || catch_lzma_error(state, lzret)) { return -1; - else + } + else { return 0; + } } static int -Compressor_init_raw(lzma_stream *lzs, PyObject *filterspecs) +Compressor_init_raw(_lzma_state *state, lzma_stream *lzs, PyObject *filterspecs) { lzma_filter filters[LZMA_FILTERS_MAX + 1]; lzma_ret lzret; @@ -655,14 +687,17 @@ Compressor_init_raw(lzma_stream *lzs, PyObject *filterspecs) "Must specify filters for FORMAT_RAW"); return -1; } - if (parse_filter_chain_spec(filters, filterspecs) == -1) + if (parse_filter_chain_spec(state, filters, filterspecs) == -1) { return -1; + } lzret = lzma_raw_encoder(lzs, filters); free_filter_chain(filters); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { return -1; - else + } + else { return 0; + } } /*[-clinic input] @@ -706,12 +741,14 @@ Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs) uint32_t preset = LZMA_PRESET_DEFAULT; PyObject *preset_obj = Py_None; PyObject *filterspecs = Py_None; - + _lzma_state *state = PyType_GetModuleState(Py_TYPE(self)); + assert(state != NULL); if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|iiOO:LZMACompressor", arg_names, &format, &check, &preset_obj, - &filterspecs)) + &filterspecs)) { return -1; + } if (format != FORMAT_XZ && check != -1 && check != LZMA_CHECK_NONE) { PyErr_SetString(PyExc_ValueError, @@ -725,9 +762,11 @@ 
Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs) return -1; } - if (preset_obj != Py_None) - if (!uint32_converter(preset_obj, &preset)) + if (preset_obj != Py_None) { + if (!uint32_converter(preset_obj, &preset)) { return -1; + } + } self->alloc.opaque = NULL; self->alloc.alloc = PyLzma_Malloc; @@ -743,20 +782,24 @@ Compressor_init(Compressor *self, PyObject *args, PyObject *kwargs) self->flushed = 0; switch (format) { case FORMAT_XZ: - if (check == -1) + if (check == -1) { check = LZMA_CHECK_CRC64; - if (Compressor_init_xz(&self->lzs, check, preset, filterspecs) != 0) + } + if (Compressor_init_xz(state, &self->lzs, check, preset, filterspecs) != 0) { break; + } return 0; case FORMAT_ALONE: - if (Compressor_init_alone(&self->lzs, preset, filterspecs) != 0) + if (Compressor_init_alone(state, &self->lzs, preset, filterspecs) != 0) { break; + } return 0; case FORMAT_RAW: - if (Compressor_init_raw(&self->lzs, filterspecs) != 0) + if (Compressor_init_raw(state, &self->lzs, filterspecs) != 0) { break; + } return 0; default: @@ -774,17 +817,42 @@ static void Compressor_dealloc(Compressor *self) { lzma_end(&self->lzs); - if (self->lock != NULL) + if (self->lock != NULL) { PyThread_free_lock(self->lock); - Py_TYPE(self)->tp_free((PyObject *)self); + } + PyTypeObject *tp = Py_TYPE(self); + tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +/*[clinic input] +_lzma.LZMACompressor.__reduce__ +[clinic start generated code]*/ + +static PyObject * +_lzma_LZMACompressor___reduce___impl(Compressor *self) +/*[clinic end generated code: output=b49a0538d1cad752 input=6be52aba16b513c1]*/ +{ + PyErr_Format(PyExc_TypeError, + "cannot pickle %s object", + Py_TYPE(self)->tp_name); + return NULL; } static PyMethodDef Compressor_methods[] = { _LZMA_LZMACOMPRESSOR_COMPRESS_METHODDEF _LZMA_LZMACOMPRESSOR_FLUSH_METHODDEF + _LZMA_LZMACOMPRESSOR___REDUCE___METHODDEF {NULL} }; +static int +Compressor_traverse(Compressor *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + PyDoc_STRVAR(Compressor_doc, "LZMACompressor(format=FORMAT_XZ, check=-1, preset=None, filters=None)\n" "\n" @@ -813,47 +881,26 @@ PyDoc_STRVAR(Compressor_doc, "\n" "For one-shot compression, use the compress() function instead.\n"); -static PyTypeObject Compressor_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_lzma.LZMACompressor", /* tp_name */ - sizeof(Compressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)Compressor_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - Compressor_doc, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Compressor_methods, /* tp_methods */ - 0, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - (initproc)Compressor_init, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ +static PyType_Slot lzma_compressor_type_slots[] = { + {Py_tp_dealloc, Compressor_dealloc}, + {Py_tp_methods, Compressor_methods}, + {Py_tp_init, Compressor_init}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_doc, (char *)Compressor_doc}, + 
{Py_tp_traverse, Compressor_traverse}, + {0, 0} }; +static PyType_Spec lzma_compressor_type_spec = { + .name = "_lzma.LZMACompressor", + .basicsize = sizeof(Compressor), + // Calling PyType_GetModuleState() on a subclass is not safe. + // lzma_compressor_type_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = lzma_compressor_type_slots, +}; /* LZMADecompressor class. */ @@ -867,13 +914,18 @@ decompress_buf(Decompressor *d, Py_ssize_t max_length) Py_ssize_t data_size = 0; PyObject *result; lzma_stream *lzs = &d->lzs; + _lzma_state *state = PyType_GetModuleState(Py_TYPE(d)); + assert(state != NULL); - if (max_length < 0 || max_length >= INITIAL_BUFFER_SIZE) + if (max_length < 0 || max_length >= INITIAL_BUFFER_SIZE) { result = PyBytes_FromStringAndSize(NULL, INITIAL_BUFFER_SIZE); - else + } + else { result = PyBytes_FromStringAndSize(NULL, max_length); - if (result == NULL) + } + if (result == NULL) { return NULL; + } lzs->next_out = (uint8_t *)PyBytes_AS_STRING(result); lzs->avail_out = PyBytes_GET_SIZE(result); @@ -884,14 +936,17 @@ decompress_buf(Decompressor *d, Py_ssize_t max_length) Py_BEGIN_ALLOW_THREADS lzret = lzma_code(lzs, LZMA_RUN); data_size = (char *)lzs->next_out - PyBytes_AS_STRING(result); - if (lzret == LZMA_BUF_ERROR && lzs->avail_in == 0 && lzs->avail_out > 0) + if (lzret == LZMA_BUF_ERROR && lzs->avail_in == 0 && lzs->avail_out > 0) { lzret = LZMA_OK; /* That wasn't a real error */ + } Py_END_ALLOW_THREADS - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { goto error; - if (lzret == LZMA_GET_CHECK || lzret == LZMA_NO_CHECK) + } + if (lzret == LZMA_GET_CHECK || lzret == LZMA_NO_CHECK) { d->check = lzma_get_check(&d->lzs); + } if (lzret == LZMA_STREAM_END) { d->eof = 1; break; @@ -900,19 +955,23 @@ decompress_buf(Decompressor *d, Py_ssize_t max_length) Maybe lzs's internal state still have a few bytes can be output, grow the output buffer and continue if max_lengh < 0. 
*/ - if (data_size == max_length) + if (data_size == max_length) { break; - if (grow_buffer(&result, max_length) == -1) + } + if (grow_buffer(&result, max_length) == -1) { goto error; + } lzs->next_out = (uint8_t *)PyBytes_AS_STRING(result) + data_size; lzs->avail_out = PyBytes_GET_SIZE(result) - data_size; } else if (lzs->avail_in == 0) { break; } } - if (data_size != PyBytes_GET_SIZE(result)) - if (_PyBytes_Resize(&result, data_size) == -1) + if (data_size != PyBytes_GET_SIZE(result)) { + if (_PyBytes_Resize(&result, data_size) == -1) { goto error; + } + } return result; @@ -984,8 +1043,9 @@ decompress(Decompressor *d, uint8_t *data, size_t len, Py_ssize_t max_length) if (lzs->avail_in > 0) { Py_XSETREF(d->unused_data, PyBytes_FromStringAndSize((char *)lzs->next_in, lzs->avail_in)); - if (d->unused_data == NULL) + if (d->unused_data == NULL) { goto error; + } } } else if (lzs->avail_in == 0) { @@ -1082,19 +1142,22 @@ _lzma_LZMADecompressor_decompress_impl(Decompressor *self, Py_buffer *data, } static int -Decompressor_init_raw(lzma_stream *lzs, PyObject *filterspecs) +Decompressor_init_raw(_lzma_state *state, lzma_stream *lzs, PyObject *filterspecs) { lzma_filter filters[LZMA_FILTERS_MAX + 1]; lzma_ret lzret; - if (parse_filter_chain_spec(filters, filterspecs) == -1) + if (parse_filter_chain_spec(state, filters, filterspecs) == -1) { return -1; + } lzret = lzma_raw_decoder(lzs, filters); free_filter_chain(filters); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { return -1; - else + } + else { return 0; + } } /*[clinic input] @@ -1130,6 +1193,8 @@ _lzma_LZMADecompressor___init___impl(Decompressor *self, int format, const uint32_t decoder_flags = LZMA_TELL_ANY_CHECK | LZMA_TELL_NO_CHECK; uint64_t memlimit_ = UINT64_MAX; lzma_ret lzret; + _lzma_state *state = PyType_GetModuleState(Py_TYPE(self)); + assert(state != NULL); if (memlimit != Py_None) { if (format == FORMAT_RAW) { @@ -1138,8 +1203,9 @@ _lzma_LZMADecompressor___init___impl(Decompressor *self, int format, return -1; } memlimit_ = PyLong_AsUnsignedLongLong(memlimit); - if (PyErr_Occurred()) + if (PyErr_Occurred()) { return -1; + } } if (format == FORMAT_RAW && filters == Py_None) { @@ -1173,33 +1239,38 @@ _lzma_LZMADecompressor___init___impl(Decompressor *self, int format, self->input_buffer = NULL; self->input_buffer_size = 0; Py_XSETREF(self->unused_data, PyBytes_FromStringAndSize(NULL, 0)); - if (self->unused_data == NULL) + if (self->unused_data == NULL) { goto error; + } switch (format) { case FORMAT_AUTO: lzret = lzma_auto_decoder(&self->lzs, memlimit_, decoder_flags); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { break; + } return 0; case FORMAT_XZ: lzret = lzma_stream_decoder(&self->lzs, memlimit_, decoder_flags); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { break; + } return 0; case FORMAT_ALONE: self->check = LZMA_CHECK_NONE; lzret = lzma_alone_decoder(&self->lzs, memlimit_); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { break; + } return 0; case FORMAT_RAW: self->check = LZMA_CHECK_NONE; - if (Decompressor_init_raw(&self->lzs, filters) == -1) + if (Decompressor_init_raw(state, &self->lzs, filters) == -1) { break; + } return 0; default: @@ -1223,13 +1294,38 @@ Decompressor_dealloc(Decompressor *self) lzma_end(&self->lzs); Py_CLEAR(self->unused_data); - if (self->lock != NULL) + if (self->lock != NULL) { PyThread_free_lock(self->lock); - Py_TYPE(self)->tp_free((PyObject *)self); + } + PyTypeObject *tp = Py_TYPE(self); + 
tp->tp_free((PyObject *)self); + Py_DECREF(tp); +} + +static int +Decompressor_traverse(Decompressor *self, visitproc visit, void *arg) +{ + Py_VISIT(Py_TYPE(self)); + return 0; +} + +/*[clinic input] +_lzma.LZMADecompressor.__reduce__ +[clinic start generated code]*/ + +static PyObject * +_lzma_LZMADecompressor___reduce___impl(Decompressor *self) +/*[clinic end generated code: output=2611fff0104a9c30 input=b9882e030aecd9a5]*/ +{ + PyErr_Format(PyExc_TypeError, + "cannot pickle %s object", + Py_TYPE(self)->tp_name); + return NULL; } static PyMethodDef Decompressor_methods[] = { _LZMA_LZMADECOMPRESSOR_DECOMPRESS_METHODDEF + _LZMA_LZMADECOMPRESSOR___REDUCE___METHODDEF {NULL} }; @@ -1257,45 +1353,26 @@ static PyMemberDef Decompressor_members[] = { {NULL} }; -static PyTypeObject Decompressor_type = { - PyVarObject_HEAD_INIT(NULL, 0) - "_lzma.LZMADecompressor", /* tp_name */ - sizeof(Decompressor), /* tp_basicsize */ - 0, /* tp_itemsize */ - (destructor)Decompressor_dealloc, /* tp_dealloc */ - 0, /* tp_vectorcall_offset */ - 0, /* tp_getattr */ - 0, /* tp_setattr */ - 0, /* tp_as_async */ - 0, /* tp_repr */ - 0, /* tp_as_number */ - 0, /* tp_as_sequence */ - 0, /* tp_as_mapping */ - 0, /* tp_hash */ - 0, /* tp_call */ - 0, /* tp_str */ - 0, /* tp_getattro */ - 0, /* tp_setattro */ - 0, /* tp_as_buffer */ - Py_TPFLAGS_DEFAULT, /* tp_flags */ - _lzma_LZMADecompressor___init____doc__, /* tp_doc */ - 0, /* tp_traverse */ - 0, /* tp_clear */ - 0, /* tp_richcompare */ - 0, /* tp_weaklistoffset */ - 0, /* tp_iter */ - 0, /* tp_iternext */ - Decompressor_methods, /* tp_methods */ - Decompressor_members, /* tp_members */ - 0, /* tp_getset */ - 0, /* tp_base */ - 0, /* tp_dict */ - 0, /* tp_descr_get */ - 0, /* tp_descr_set */ - 0, /* tp_dictoffset */ - _lzma_LZMADecompressor___init__, /* tp_init */ - 0, /* tp_alloc */ - PyType_GenericNew, /* tp_new */ +static PyType_Slot lzma_decompressor_type_slots[] = { + {Py_tp_dealloc, Decompressor_dealloc}, + {Py_tp_methods, Decompressor_methods}, + {Py_tp_init, _lzma_LZMADecompressor___init__}, + {Py_tp_new, PyType_GenericNew}, + {Py_tp_doc, (char *)_lzma_LZMADecompressor___init____doc__}, + {Py_tp_traverse, Decompressor_traverse}, + {Py_tp_members, Decompressor_members}, + {0, 0} +}; + +static PyType_Spec lzma_decompressor_type_spec = { + .name = "_lzma.LZMADecompressor", + .basicsize = sizeof(Decompressor), + // Calling PyType_GetModuleState() on a subclass is not safe. + // lzma_decompressor_type_spec does not have Py_TPFLAGS_BASETYPE flag + // which prevents to create a subclass. + // So calling PyType_GetModuleState() in this file is always safe. + .flags = Py_TPFLAGS_DEFAULT, + .slots = lzma_decompressor_type_slots, }; @@ -1318,27 +1395,52 @@ _lzma_is_check_supported_impl(PyObject *module, int check_id) return PyBool_FromLong(lzma_check_is_supported(check_id)); } +PyDoc_STRVAR(_lzma__encode_filter_properties__doc__, +"_encode_filter_properties($module, filter, /)\n" +"--\n" +"\n" +"Return a bytes object encoding the options (properties) of the filter specified by *filter* (a dict).\n" +"\n" +"The result does not include the filter ID itself, only the options."); -/*[clinic input] -_lzma._encode_filter_properties - filter: lzma_filter(c_default="{LZMA_VLI_UNKNOWN, NULL}") - / +#define _LZMA__ENCODE_FILTER_PROPERTIES_METHODDEF \ + {"_encode_filter_properties", (PyCFunction)_lzma__encode_filter_properties, METH_O, _lzma__encode_filter_properties__doc__}, -Return a bytes object encoding the options (properties) of the filter specified by *filter* (a dict). 
+static PyObject * +_lzma__encode_filter_properties_impl(PyObject *module, lzma_filter filter); -The result does not include the filter ID itself, only the options. -[clinic start generated code]*/ +static PyObject * +_lzma__encode_filter_properties(PyObject *module, PyObject *arg) +{ + PyObject *return_value = NULL; + lzma_filter filter = {LZMA_VLI_UNKNOWN, NULL}; + _lzma_state *state = get_lzma_state(module); + assert(state != NULL); + if (!lzma_filter_converter(state, arg, &filter)) { + goto exit; + } + return_value = _lzma__encode_filter_properties_impl(module, filter); + +exit: + /* Cleanup for filter */ + if (filter.id != LZMA_VLI_UNKNOWN) { + PyMem_Free(filter.options); + } + + return return_value; +} static PyObject * _lzma__encode_filter_properties_impl(PyObject *module, lzma_filter filter) -/*[clinic end generated code: output=5c93c8e14e7be5a8 input=d4c64f1b557c77d4]*/ { lzma_ret lzret; uint32_t encoded_size; PyObject *result = NULL; + _lzma_state *state = get_lzma_state(module); + assert(state != NULL); lzret = lzma_properties_size(&encoded_size, &filter); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) goto error; result = PyBytes_FromStringAndSize(NULL, encoded_size); @@ -1347,8 +1449,9 @@ _lzma__encode_filter_properties_impl(PyObject *module, lzma_filter filter) lzret = lzma_properties_encode( &filter, (uint8_t *)PyBytes_AS_STRING(result)); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { goto error; + } return result; @@ -1378,11 +1481,14 @@ _lzma__decode_filter_properties_impl(PyObject *module, lzma_vli filter_id, lzma_ret lzret; PyObject *result = NULL; filter.id = filter_id; + _lzma_state *state = get_lzma_state(module); + assert(state != NULL); lzret = lzma_properties_decode( &filter, NULL, encoded_props->buf, encoded_props->len); - if (catch_lzma_error(lzret)) + if (catch_lzma_error(state, lzret)) { return NULL; + } result = build_filter_spec(&filter); @@ -1392,103 +1498,162 @@ _lzma__decode_filter_properties_impl(PyObject *module, lzma_vli filter_id, return result; } - -/* Module initialization. */ - -static PyMethodDef module_methods[] = { - _LZMA_IS_CHECK_SUPPORTED_METHODDEF - _LZMA__ENCODE_FILTER_PROPERTIES_METHODDEF - _LZMA__DECODE_FILTER_PROPERTIES_METHODDEF - {NULL} -}; - -static PyModuleDef _lzmamodule = { - PyModuleDef_HEAD_INIT, - "_lzma", - NULL, - -1, - module_methods, - NULL, - NULL, - NULL, - NULL, -}; - /* Some of our constants are more than 32 bits wide, so PyModule_AddIntConstant would not work correctly on platforms with 32-bit longs. 
*/ static int module_add_int_constant(PyObject *m, const char *name, long long value) { PyObject *o = PyLong_FromLongLong(value); - if (o == NULL) + if (o == NULL) { return -1; - if (PyModule_AddObject(m, name, o) == 0) + } + if (PyModule_AddObject(m, name, o) == 0) { return 0; + } Py_DECREF(o); return -1; } -#define ADD_INT_PREFIX_MACRO(m, macro) \ - module_add_int_constant(m, #macro, LZMA_ ## macro) - -PyMODINIT_FUNC -PyInit__lzma(void) +static int +lzma_exec(PyObject *module) { - PyObject *m; +#define ADD_INT_PREFIX_MACRO(module, macro) \ + do { \ + if (module_add_int_constant(module, #macro, LZMA_ ## macro) < 0) { \ + return -1; \ + } \ + } while(0) + +#define ADD_INT_MACRO(module, macro) \ + do { \ + if (PyModule_AddIntMacro(module, macro) < 0) { \ + return -1; \ + } \ + } while (0) - empty_tuple = PyTuple_New(0); - if (empty_tuple == NULL) - return NULL; - m = PyModule_Create(&_lzmamodule); - if (m == NULL) - return NULL; + _lzma_state *state = get_lzma_state(module); - if (PyModule_AddIntMacro(m, FORMAT_AUTO) == -1 || - PyModule_AddIntMacro(m, FORMAT_XZ) == -1 || - PyModule_AddIntMacro(m, FORMAT_ALONE) == -1 || - PyModule_AddIntMacro(m, FORMAT_RAW) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_NONE) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_CRC32) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_CRC64) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_SHA256) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_ID_MAX) == -1 || - ADD_INT_PREFIX_MACRO(m, CHECK_UNKNOWN) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_LZMA1) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_LZMA2) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_DELTA) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_X86) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_IA64) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_ARM) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_ARMTHUMB) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_SPARC) == -1 || - ADD_INT_PREFIX_MACRO(m, FILTER_POWERPC) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_HC3) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_HC4) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT2) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT3) == -1 || - ADD_INT_PREFIX_MACRO(m, MF_BT4) == -1 || - ADD_INT_PREFIX_MACRO(m, MODE_FAST) == -1 || - ADD_INT_PREFIX_MACRO(m, MODE_NORMAL) == -1 || - ADD_INT_PREFIX_MACRO(m, PRESET_DEFAULT) == -1 || - ADD_INT_PREFIX_MACRO(m, PRESET_EXTREME) == -1) - return NULL; + state->empty_tuple = PyTuple_New(0); + if (state->empty_tuple == NULL) { + return -1; + } - Error = PyErr_NewExceptionWithDoc( - "_lzma.LZMAError", "Call to liblzma failed.", NULL, NULL); - if (Error == NULL) - return NULL; - Py_INCREF(Error); - if (PyModule_AddObject(m, "LZMAError", Error) == -1) - return NULL; + ADD_INT_MACRO(module, FORMAT_AUTO); + ADD_INT_MACRO(module, FORMAT_XZ); + ADD_INT_MACRO(module, FORMAT_ALONE); + ADD_INT_MACRO(module, FORMAT_RAW); + ADD_INT_PREFIX_MACRO(module, CHECK_NONE); + ADD_INT_PREFIX_MACRO(module, CHECK_CRC32); + ADD_INT_PREFIX_MACRO(module, CHECK_CRC64); + ADD_INT_PREFIX_MACRO(module, CHECK_SHA256); + ADD_INT_PREFIX_MACRO(module, CHECK_ID_MAX); + ADD_INT_PREFIX_MACRO(module, CHECK_UNKNOWN); + ADD_INT_PREFIX_MACRO(module, FILTER_LZMA1); + ADD_INT_PREFIX_MACRO(module, FILTER_LZMA2); + ADD_INT_PREFIX_MACRO(module, FILTER_DELTA); + ADD_INT_PREFIX_MACRO(module, FILTER_X86); + ADD_INT_PREFIX_MACRO(module, FILTER_IA64); + ADD_INT_PREFIX_MACRO(module, FILTER_ARM); + ADD_INT_PREFIX_MACRO(module, FILTER_ARMTHUMB); + ADD_INT_PREFIX_MACRO(module, FILTER_SPARC); + ADD_INT_PREFIX_MACRO(module, FILTER_POWERPC); + ADD_INT_PREFIX_MACRO(module, MF_HC3); + 
ADD_INT_PREFIX_MACRO(module, MF_HC4); + ADD_INT_PREFIX_MACRO(module, MF_BT2); + ADD_INT_PREFIX_MACRO(module, MF_BT3); + ADD_INT_PREFIX_MACRO(module, MF_BT4); + ADD_INT_PREFIX_MACRO(module, MODE_FAST); + ADD_INT_PREFIX_MACRO(module, MODE_NORMAL); + ADD_INT_PREFIX_MACRO(module, PRESET_DEFAULT); + ADD_INT_PREFIX_MACRO(module, PRESET_EXTREME); + + state->error = PyErr_NewExceptionWithDoc("_lzma.LZMAError", "Call to liblzma failed.", NULL, NULL); + if (state->error == NULL) { + return -1; + } - if (PyModule_AddType(m, &Compressor_type) < 0) { - return NULL; + if (PyModule_AddType(module, (PyTypeObject *)state->error) < 0) { + return -1; } - if (PyModule_AddType(m, &Decompressor_type) < 0) { - return NULL; + + state->lzma_compressor_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &lzma_compressor_type_spec, NULL); + if (state->lzma_compressor_type == NULL) { + return -1; + } + + if (PyModule_AddType(module, state->lzma_compressor_type) < 0) { + return -1; + } + + state->lzma_decompressor_type = (PyTypeObject *)PyType_FromModuleAndSpec(module, + &lzma_decompressor_type_spec, NULL); + if (state->lzma_decompressor_type == NULL) { + return -1; + } + + if (PyModule_AddType(module, state->lzma_decompressor_type) < 0) { + return -1; } - return m; + return 0; +} + +static PyMethodDef lzma_methods[] = { + _LZMA_IS_CHECK_SUPPORTED_METHODDEF + _LZMA__ENCODE_FILTER_PROPERTIES_METHODDEF + _LZMA__DECODE_FILTER_PROPERTIES_METHODDEF + {NULL} +}; + +static PyModuleDef_Slot lzma_slots[] = { + {Py_mod_exec, lzma_exec}, + {0, NULL} +}; + +static int +lzma_traverse(PyObject *module, visitproc visit, void *arg) +{ + _lzma_state *state = get_lzma_state(module); + Py_VISIT(state->lzma_compressor_type); + Py_VISIT(state->lzma_decompressor_type); + Py_VISIT(state->error); + Py_VISIT(state->empty_tuple); + return 0; +} + +static int +lzma_clear(PyObject *module) +{ + _lzma_state *state = get_lzma_state(module); + Py_CLEAR(state->lzma_compressor_type); + Py_CLEAR(state->lzma_decompressor_type); + Py_CLEAR(state->error); + Py_CLEAR(state->empty_tuple); + return 0; +} + +static void +lzma_free(void *module) +{ + lzma_clear((PyObject *)module); +} + +static PyModuleDef _lzmamodule = { + PyModuleDef_HEAD_INIT, + .m_name = "_lzma", + .m_size = sizeof(_lzma_state), + .m_methods = lzma_methods, + .m_slots = lzma_slots, + .m_traverse = lzma_traverse, + .m_clear = lzma_clear, + .m_free = lzma_free, +}; + +PyMODINIT_FUNC +PyInit__lzma(void) +{ + return PyModuleDef_Init(&_lzmamodule); } diff --git a/Modules/clinic/_lzmamodule.c.h b/Modules/clinic/_lzmamodule.c.h index e4e0a7945a8fb..e15cc0c7e743b 100644 --- a/Modules/clinic/_lzmamodule.c.h +++ b/Modules/clinic/_lzmamodule.c.h @@ -65,6 +65,23 @@ _lzma_LZMACompressor_flush(Compressor *self, PyObject *Py_UNUSED(ignored)) return _lzma_LZMACompressor_flush_impl(self); } +PyDoc_STRVAR(_lzma_LZMACompressor___reduce____doc__, +"__reduce__($self, /)\n" +"--\n" +"\n"); + +#define _LZMA_LZMACOMPRESSOR___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)_lzma_LZMACompressor___reduce__, METH_NOARGS, _lzma_LZMACompressor___reduce____doc__}, + +static PyObject * +_lzma_LZMACompressor___reduce___impl(Compressor *self); + +static PyObject * +_lzma_LZMACompressor___reduce__(Compressor *self, PyObject *Py_UNUSED(ignored)) +{ + return _lzma_LZMACompressor___reduce___impl(self); +} + PyDoc_STRVAR(_lzma_LZMADecompressor_decompress__doc__, "decompress($self, /, data, max_length=-1)\n" "--\n" @@ -211,6 +228,23 @@ _lzma_LZMADecompressor___init__(PyObject *self, PyObject *args, PyObject *kwargs 
return return_value; } +PyDoc_STRVAR(_lzma_LZMADecompressor___reduce____doc__, +"__reduce__($self, /)\n" +"--\n" +"\n"); + +#define _LZMA_LZMADECOMPRESSOR___REDUCE___METHODDEF \ + {"__reduce__", (PyCFunction)_lzma_LZMADecompressor___reduce__, METH_NOARGS, _lzma_LZMADecompressor___reduce____doc__}, + +static PyObject * +_lzma_LZMADecompressor___reduce___impl(Decompressor *self); + +static PyObject * +_lzma_LZMADecompressor___reduce__(Decompressor *self, PyObject *Py_UNUSED(ignored)) +{ + return _lzma_LZMADecompressor___reduce___impl(self); +} + PyDoc_STRVAR(_lzma_is_check_supported__doc__, "is_check_supported($module, check_id, /)\n" "--\n" @@ -241,39 +275,6 @@ _lzma_is_check_supported(PyObject *module, PyObject *arg) return return_value; } -PyDoc_STRVAR(_lzma__encode_filter_properties__doc__, -"_encode_filter_properties($module, filter, /)\n" -"--\n" -"\n" -"Return a bytes object encoding the options (properties) of the filter specified by *filter* (a dict).\n" -"\n" -"The result does not include the filter ID itself, only the options."); - -#define _LZMA__ENCODE_FILTER_PROPERTIES_METHODDEF \ - {"_encode_filter_properties", (PyCFunction)_lzma__encode_filter_properties, METH_O, _lzma__encode_filter_properties__doc__}, - -static PyObject * -_lzma__encode_filter_properties_impl(PyObject *module, lzma_filter filter); - -static PyObject * -_lzma__encode_filter_properties(PyObject *module, PyObject *arg) -{ - PyObject *return_value = NULL; - lzma_filter filter = {LZMA_VLI_UNKNOWN, NULL}; - - if (!lzma_filter_converter(arg, &filter)) { - goto exit; - } - return_value = _lzma__encode_filter_properties_impl(module, filter); - -exit: - /* Cleanup for filter */ - if (filter.id != LZMA_VLI_UNKNOWN) - PyMem_Free(filter.options); - - return return_value; -} - PyDoc_STRVAR(_lzma__decode_filter_properties__doc__, "_decode_filter_properties($module, filter_id, encoded_props, /)\n" "--\n" @@ -319,4 +320,4 @@ _lzma__decode_filter_properties(PyObject *module, PyObject *const *args, Py_ssiz return return_value; } -/*[clinic end generated code: output=d6e997ebc269f78f input=a9049054013a1b77]*/ +/*[clinic end generated code: output=d89b6159e98544be input=a9049054013a1b77]*/ From webhook-mailer at python.org Mon Jun 22 12:02:57 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 22 Jun 2020 16:02:57 -0000 Subject: [Python-checkins] bpo-41078: Fix bltinmodule.c with Py_TRACE_REFS (GH-21058) Message-ID: https://github.com/python/cpython/commit/c96d00e88ead8f99bb6aa1357928ac4545d9287c commit: c96d00e88ead8f99bb6aa1357928ac4545d9287c branch: master author: Victor Stinner committer: GitHub date: 2020-06-22T18:02:49+02:00 summary: bpo-41078: Fix bltinmodule.c with Py_TRACE_REFS (GH-21058) Add pycore_object.h include to fix bltinmodule.c when Py_TRACE_REFS macro is defined. 
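Background, as a hedged sketch: when CPython is configured with --with-trace-refs, the Py_TRACE_REFS macro is defined and bltinmodule.c registers its statically allocated objects in the global object chain via _Py_AddToAllObjects(), which is declared in pycore_object.h; without that include, such builds fail to compile. The snippet below only illustrates the pattern -- the wrapper name and the exact signature of _Py_AddToAllObjects() are assumptions, not taken from this commit.

    /* Illustrative only; assumes _Py_AddToAllObjects(PyObject *, int). */
    #include "Python.h"
    #include "pycore_object.h"      // _Py_AddToAllObjects()

    static void
    expose_static_object(PyObject *op)
    {
    #ifdef Py_TRACE_REFS
        /* Make a statically allocated object visible to sys.getobjects(). */
        _Py_AddToAllObjects(op, 0);
    #else
        (void)op;
    #endif
    }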
files: M Python/bltinmodule.c diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c index a582ccda2c384..89b7fce8f4a9c 100644 --- a/Python/bltinmodule.c +++ b/Python/bltinmodule.c @@ -4,6 +4,7 @@ #include #include "ast.h" #undef Yield /* undefine macro conflicting with */ +#include "pycore_object.h" // _Py_AddToAllObjects() #include "pycore_pyerrors.h" // _PyErr_NoMemory() #include "pycore_pystate.h" // _PyThreadState_GET() #include "pycore_tuple.h" // _PyTuple_FromArray() From webhook-mailer at python.org Mon Jun 22 22:16:10 2020 From: webhook-mailer at python.org (Roger Iyengar) Date: Tue, 23 Jun 2020 02:16:10 -0000 Subject: [Python-checkins] Improve asyncio.loop.call_soon() documentation (GH-20883) Message-ID: https://github.com/python/cpython/commit/a16d6970496cae5ecab5aaea2b416a4b77527cc2 commit: a16d6970496cae5ecab5aaea2b416a4b77527cc2 branch: master author: Roger Iyengar committer: GitHub date: 2020-06-22T22:16:00-04:00 summary: Improve asyncio.loop.call_soon() documentation (GH-20883) * Add a glossary entry for the term "callback" * Link to it in loop.call_soon() and in the "Concurrency and Multithreading" section Co-authored-by: Kyle Stanley files: M Doc/glossary.rst M Doc/library/asyncio-dev.rst M Doc/library/asyncio-eventloop.rst diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 6189cb045049c..e997d366777b3 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -189,6 +189,10 @@ Glossary A list of bytecode instructions can be found in the documentation for :ref:`the dis module `. + callback + A subroutine function which is passed as an argument to be executed at + some point in the future. + class A template for creating user-defined objects. Class definitions normally contain method definitions which operate on instances of the diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index ff51c4fa3b20f..77f8067197836 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -73,7 +73,7 @@ event loop, no other Tasks can run in the same thread. When a Task executes an ``await`` expression, the running Task gets suspended, and the event loop executes the next Task. -To schedule a callback from a different OS thread, the +To schedule a :term:`callback` from another OS thread, the :meth:`loop.call_soon_threadsafe` method should be used. Example:: loop.call_soon_threadsafe(callback, *args) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index d60a6ce95cdd8..b1e73189a7a4c 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -191,8 +191,8 @@ Scheduling callbacks .. method:: loop.call_soon(callback, *args, context=None) - Schedule a *callback* to be called with *args* arguments at - the next iteration of the event loop. + Schedule the *callback* :term:`callback` to be called with + *args* arguments at the next iteration of the event loop. Callbacks are called in the order in which they are registered. Each callback will be called exactly once. 
From webhook-mailer at python.org Mon Jun 22 22:23:54 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 23 Jun 2020 02:23:54 -0000 Subject: [Python-checkins] Improve asyncio.loop.call_soon() documentation (GH-20883) Message-ID: https://github.com/python/cpython/commit/56d25add07093701c4827ea3a46b7025d9030f3c commit: 56d25add07093701c4827ea3a46b7025d9030f3c branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-22T19:23:49-07:00 summary: Improve asyncio.loop.call_soon() documentation (GH-20883) * Add a glossary entry for the term "callback" * Link to it in loop.call_soon() and in the "Concurrency and Multithreading" section Co-authored-by: Kyle Stanley (cherry picked from commit a16d6970496cae5ecab5aaea2b416a4b77527cc2) Co-authored-by: Roger Iyengar files: M Doc/glossary.rst M Doc/library/asyncio-dev.rst M Doc/library/asyncio-eventloop.rst diff --git a/Doc/glossary.rst b/Doc/glossary.rst index 6189cb045049c..e997d366777b3 100644 --- a/Doc/glossary.rst +++ b/Doc/glossary.rst @@ -189,6 +189,10 @@ Glossary A list of bytecode instructions can be found in the documentation for :ref:`the dis module `. + callback + A subroutine function which is passed as an argument to be executed at + some point in the future. + class A template for creating user-defined objects. Class definitions normally contain method definitions which operate on instances of the diff --git a/Doc/library/asyncio-dev.rst b/Doc/library/asyncio-dev.rst index 101e7817a95e9..0d302ea49b0de 100644 --- a/Doc/library/asyncio-dev.rst +++ b/Doc/library/asyncio-dev.rst @@ -73,7 +73,7 @@ event loop, no other Tasks can run in the same thread. When a Task executes an ``await`` expression, the running Task gets suspended, and the event loop executes the next Task. -To schedule a callback from a different OS thread, the +To schedule a :term:`callback` from another OS thread, the :meth:`loop.call_soon_threadsafe` method should be used. Example:: loop.call_soon_threadsafe(callback, *args) diff --git a/Doc/library/asyncio-eventloop.rst b/Doc/library/asyncio-eventloop.rst index 9022993e619a5..32bc219cf5c37 100644 --- a/Doc/library/asyncio-eventloop.rst +++ b/Doc/library/asyncio-eventloop.rst @@ -179,8 +179,8 @@ Scheduling callbacks .. method:: loop.call_soon(callback, *args, context=None) - Schedule a *callback* to be called with *args* arguments at - the next iteration of the event loop. + Schedule the *callback* :term:`callback` to be called with + *args* arguments at the next iteration of the event loop. Callbacks are called in the order in which they are registered. Each callback will be called exactly once. 
From webhook-mailer at python.org Mon Jun 22 22:39:08 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Jun 2020 02:39:08 -0000 Subject: [Python-checkins] Small clean-ups for the random module (GH-21038) Message-ID: https://github.com/python/cpython/commit/26a1ad1c24717990265b71ed093d691500d6301c commit: 26a1ad1c24717990265b71ed093d691500d6301c branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-22T19:38:59-07:00 summary: Small clean-ups for the random module (GH-21038) files: M Lib/random.py diff --git a/Lib/random.py b/Lib/random.py index 02a56c6935b89..ae7b5cf4e72e8 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -39,7 +39,8 @@ from warnings import warn as _warn from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil -from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin, tau as TWOPI +from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin +from math import tau as TWOPI, floor as _floor from os import urandom as _urandom from _collections_abc import Set as _Set, Sequence as _Sequence from itertools import accumulate as _accumulate, repeat as _repeat @@ -234,7 +235,7 @@ def __reduce__(self): ## -------------------- integer methods ------------------- - def randrange(self, start, stop=None, step=1, _int=int): + def randrange(self, start, stop=None, step=1): """Choose a random item from range(start, stop[, step]). This fixes the problem with randint() which includes the @@ -244,7 +245,7 @@ def randrange(self, start, stop=None, step=1, _int=int): # This code is a bit messy to make it fast for the # common case while still doing adequate error checking. - istart = _int(start) + istart = int(start) if istart != start: raise ValueError("non-integer arg 1 for randrange()") if stop is None: @@ -253,7 +254,7 @@ def randrange(self, start, stop=None, step=1, _int=int): raise ValueError("empty range for randrange()") # stop argument supplied. - istop = _int(stop) + istop = int(stop) if istop != stop: raise ValueError("non-integer stop for randrange()") width = istop - istart @@ -263,7 +264,7 @@ def randrange(self, start, stop=None, step=1, _int=int): raise ValueError("empty range for randrange() (%d, %d, %d)" % (istart, istop, width)) # Non-unit step argument supplied. 
- istep = _int(step) + istep = int(step) if istep != step: raise ValueError("non-integer step for randrange()") if istep > 0: @@ -296,7 +297,7 @@ def _randbelow_with_getrandbits(self, n): r = getrandbits(k) return r - def _randbelow_without_getrandbits(self, n, int=int, maxsize=1<= limit: r = random() - return int(r * maxsize) % n + return _floor(r * maxsize) % n _randbelow = _randbelow_with_getrandbits @@ -346,10 +347,10 @@ def shuffle(self, x, random=None): 'since Python 3.9 and will be removed in a subsequent ' 'version.', DeprecationWarning, 2) - _int = int + floor = _floor for i in reversed(range(1, len(x))): # pick an element in x[:i+1] with which to exchange x[i] - j = _int(random() * (i + 1)) + j = floor(random() * (i + 1)) x[i], x[j] = x[j], x[i] def sample(self, population, k, *, counts=None): @@ -462,9 +463,9 @@ def choices(self, population, weights=None, *, cum_weights=None, k=1): n = len(population) if cum_weights is None: if weights is None: - _int = int + floor = _floor n += 0.0 # convert to float for a small speed improvement - return [population[_int(random() * n)] for i in _repeat(None, k)] + return [population[floor(random() * n)] for i in _repeat(None, k)] cum_weights = list(_accumulate(weights)) elif weights is not None: raise TypeError('Cannot specify both weights and cumulative weights') @@ -814,24 +815,20 @@ def _notimplemented(self, *args, **kwds): ## -------------------- test program -------------------- def _test_generator(n, func, args): - import time - print(n, 'times', func.__name__) - total = 0.0 - sqsum = 0.0 - smallest = 1e10 - largest = -1e10 - t0 = time.perf_counter() - for i in range(n): - x = func(*args) - total += x - sqsum = sqsum + x*x - smallest = min(x, smallest) - largest = max(x, largest) - t1 = time.perf_counter() - print(round(t1 - t0, 3), 'sec,', end=' ') - avg = total / n - stddev = _sqrt(sqsum / n - avg * avg) - print('avg %g, stddev %g, min %g, max %g\n' % (avg, stddev, smallest, largest)) + from statistics import stdev, fmean as mean + from time import perf_counter + + t0 = perf_counter() + data = [func(*args) for i in range(n)] + t1 = perf_counter() + + xbar = mean(data) + sigma = stdev(data, xbar) + low = min(data) + high = max(data) + + print(f'{t1 - t0:.3f} sec, {n} times {func.__name__}') + print('avg %g, stddev %g, min %g, max %g\n' % (xbar, sigma, low, high)) def _test(N=2000): From webhook-mailer at python.org Tue Jun 23 05:33:34 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 09:33:34 -0000 Subject: [Python-checkins] bpo-40521: Make dict free lists per-interpreter (GH-20645) Message-ID: https://github.com/python/cpython/commit/b4e85cadfbc2b1b24ec5f3159e351dbacedaa5e0 commit: b4e85cadfbc2b1b24ec5f3159e351dbacedaa5e0 branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T11:33:18+02:00 summary: bpo-40521: Make dict free lists per-interpreter (GH-20645) Each interpreter now has its own dict free list: * Move dict free lists into PyInterpreterState. * Move PyDict_MAXFREELIST define to pycore_interp.h * Add _Py_dict_state structure. * Add tstate parameter to _PyDict_ClearFreeList() and _PyDict_Fini(). * In debug mode, ensure that the dict free lists are not used after _PyDict_Fini() is called. * Remove "#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS". 
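A minimal sketch of the lookup pattern this change introduces is shown below. The struct and field names (_Py_dict_state, dict_state, free_list, numfree) are taken from the pycore_interp.h and dictobject.c hunks that follow; the helper function itself is illustrative, not code from the commit.

    #include "Python.h"
    #include "pycore_interp.h"      // PyInterpreterState, struct _Py_dict_state

    /* Allocation fast path, now per interpreter instead of process-wide. */
    static PyDictObject *
    new_dict_from_freelist(PyInterpreterState *interp)
    {
        struct _Py_dict_state *state = &interp->dict_state;
        if (state->numfree > 0) {
            /* Reuse a dict previously freed by this same interpreter. */
            return state->free_list[--state->numfree];
        }
        return NULL;    /* caller falls back to a fresh GC allocation */
    }

Because each interpreter consults only its own _Py_dict_state, two interpreters can allocate and free dicts concurrently without sharing (or corrupting) a common free list.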
files: M Include/internal/pycore_gc.h M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Modules/gcmodule.c M Objects/dictobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_gc.h b/Include/internal/pycore_gc.h index fd3fb7f94cab0..da202a1df532e 100644 --- a/Include/internal/pycore_gc.h +++ b/Include/internal/pycore_gc.h @@ -169,7 +169,7 @@ extern void _PyFrame_ClearFreeList(PyThreadState *tstate); extern void _PyTuple_ClearFreeList(PyThreadState *tstate); extern void _PyFloat_ClearFreeList(PyThreadState *tstate); extern void _PyList_ClearFreeList(PyThreadState *tstate); -extern void _PyDict_ClearFreeList(void); +extern void _PyDict_ClearFreeList(PyThreadState *tstate); extern void _PyAsyncGen_ClearFreeLists(PyThreadState *tstate); extern void _PyContext_ClearFreeList(PyThreadState *tstate); diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 981b73340b7ea..3f64edcee983b 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -69,6 +69,14 @@ struct _Py_unicode_state { struct _Py_unicode_fs_codec fs_codec; }; +struct _Py_float_state { + /* Special free list + free_list is a singly-linked list of available PyFloatObjects, + linked via abuse of their ob_type members. */ + int numfree; + PyFloatObject *free_list; +}; + /* Speed optimization to avoid frequent malloc/free of small tuples */ #ifndef PyTuple_MAXSAVESIZE // Largest tuple to save on free list @@ -99,12 +107,16 @@ struct _Py_list_state { int numfree; }; -struct _Py_float_state { - /* Special free list - free_list is a singly-linked list of available PyFloatObjects, - linked via abuse of their ob_type members. */ +#ifndef PyDict_MAXFREELIST +# define PyDict_MAXFREELIST 80 +#endif + +struct _Py_dict_state { + /* Dictionary reuse scheme to save calls to malloc and free */ + PyDictObject *free_list[PyDict_MAXFREELIST]; int numfree; - PyFloatObject *free_list; + PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; + int keys_numfree; }; struct _Py_frame_state { @@ -136,7 +148,6 @@ struct _Py_context_state { }; - /* interpreter state */ #define _PY_NSMALLPOSINTS 257 @@ -182,8 +193,6 @@ struct _is { PyObject *codec_error_registry; int codecs_initialized; - struct _Py_unicode_state unicode; - PyConfig config; #ifdef HAVE_DLOPEN int dlopenflags; @@ -224,16 +233,18 @@ struct _is { */ PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS]; #endif + struct _Py_unicode_state unicode; + struct _Py_float_state float_state; + /* Using a cache is very effective since typically only a single slice is + created and then deleted again. */ + PySliceObject *slice_cache; + struct _Py_tuple_state tuple; struct _Py_list_state list; - struct _Py_float_state float_state; + struct _Py_dict_state dict_state; struct _Py_frame_state frame; struct _Py_async_gen_state async_gen; struct _Py_context_state context; - - /* Using a cache is very effective since typically only a single slice is - created and then deleted again. 
*/ - PySliceObject *slice_cache; }; /* Used by _PyImport_Cleanup() */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 3e3657339a4a4..dc99737829772 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -59,7 +59,7 @@ extern PyStatus _PyGC_Init(PyThreadState *tstate); /* Various internal finalizers */ extern void _PyFrame_Fini(PyThreadState *tstate); -extern void _PyDict_Fini(void); +extern void _PyDict_Fini(PyThreadState *tstate); extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(PyThreadState *tstate); extern void _PySet_Fini(void); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 39cb80447f6a9..3406ca8c973d8 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,4 +1,5 @@ The tuple free lists, the empty tuple singleton, the list free list, the float -free list, the slice cache, the frame free list, the asynchronous generator -free lists, and the context free list are no longer shared by all interpreters: -each interpreter now its has own free lists and caches. +free list, the slice cache, the dict free lists, the frame free list, the +asynchronous generator free lists, and the context free list are no longer +shared by all interpreters: each interpreter now its has own free lists and +caches. diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c index 110a48d8cd76f..8833400caba75 100644 --- a/Modules/gcmodule.c +++ b/Modules/gcmodule.c @@ -1038,7 +1038,7 @@ clear_freelists(PyThreadState *tstate) _PyTuple_ClearFreeList(tstate); _PyFloat_ClearFreeList(tstate); _PyList_ClearFreeList(tstate); - _PyDict_ClearFreeList(); + _PyDict_ClearFreeList(tstate); _PyAsyncGen_ClearFreeLists(tstate); _PyContext_ClearFreeList(tstate); } diff --git a/Objects/dictobject.c b/Objects/dictobject.c index 55bf4aefbbeac..f3b1157177655 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -247,58 +247,47 @@ static uint64_t pydict_global_version = 0; #define DICT_NEXT_VERSION() (++pydict_global_version) -/* Dictionary reuse scheme to save calls to malloc and free */ -#ifndef PyDict_MAXFREELIST -#define PyDict_MAXFREELIST 80 -#endif - -/* bpo-40521: dict free lists are shared by all interpreters. 
*/ -#ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS -# undef PyDict_MAXFREELIST -# define PyDict_MAXFREELIST 0 -#endif - -#if PyDict_MAXFREELIST > 0 -static PyDictObject *free_list[PyDict_MAXFREELIST]; -static int numfree = 0; -static PyDictKeysObject *keys_free_list[PyDict_MAXFREELIST]; -static int numfreekeys = 0; -#endif - #include "clinic/dictobject.c.h" void -_PyDict_ClearFreeList(void) +_PyDict_ClearFreeList(PyThreadState *tstate) { -#if PyDict_MAXFREELIST > 0 - while (numfree) { - PyDictObject *op = free_list[--numfree]; + struct _Py_dict_state *state = &tstate->interp->dict_state; + while (state->numfree) { + PyDictObject *op = state->free_list[--state->numfree]; assert(PyDict_CheckExact(op)); PyObject_GC_Del(op); } - while (numfreekeys) { - PyObject_FREE(keys_free_list[--numfreekeys]); + while (state->keys_numfree) { + PyObject_FREE(state->keys_free_list[--state->keys_numfree]); } -#endif } -/* Print summary info about the state of the optimized allocator */ + void -_PyDict_DebugMallocStats(FILE *out) +_PyDict_Fini(PyThreadState *tstate) { -#if PyDict_MAXFREELIST > 0 - _PyDebugAllocatorStats(out, - "free PyDictObject", numfree, sizeof(PyDictObject)); + _PyDict_ClearFreeList(tstate); +#ifdef Py_DEBUG + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; + state->numfree = -1; + state->keys_numfree = -1; #endif } +/* Print summary info about the state of the optimized allocator */ void -_PyDict_Fini(void) +_PyDict_DebugMallocStats(FILE *out) { - _PyDict_ClearFreeList(); + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; + _PyDebugAllocatorStats(out, "free PyDictObject", + state->numfree, sizeof(PyDictObject)); } + #define DK_SIZE(dk) ((dk)->dk_size) #if SIZEOF_VOID_P > 4 #define DK_IXSIZE(dk) \ @@ -543,7 +532,8 @@ _PyDict_CheckConsistency(PyObject *op, int check_content) } -static PyDictKeysObject *new_keys_object(Py_ssize_t size) +static PyDictKeysObject* +new_keys_object(Py_ssize_t size) { PyDictKeysObject *dk; Py_ssize_t es, usable; @@ -567,12 +557,16 @@ static PyDictKeysObject *new_keys_object(Py_ssize_t size) es = sizeof(Py_ssize_t); } -#if PyDict_MAXFREELIST > 0 - if (size == PyDict_MINSIZE && numfreekeys > 0) { - dk = keys_free_list[--numfreekeys]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; +#ifdef Py_DEBUG + // new_keys_object() must not be called after _PyDict_Fini() + assert(state->keys_numfree != -1); +#endif + if (size == PyDict_MINSIZE && state->keys_numfree > 0) { + dk = state->keys_free_list[--state->keys_numfree]; } else -#endif { dk = PyObject_MALLOC(sizeof(PyDictKeysObject) + es * size @@ -604,12 +598,16 @@ free_keys_object(PyDictKeysObject *keys) Py_XDECREF(entries[i].me_key); Py_XDECREF(entries[i].me_value); } -#if PyDict_MAXFREELIST > 0 - if (keys->dk_size == PyDict_MINSIZE && numfreekeys < PyDict_MAXFREELIST) { - keys_free_list[numfreekeys++] = keys; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; +#ifdef Py_DEBUG + // free_keys_object() must not be called after _PyDict_Fini() + assert(state->keys_numfree != -1); +#endif + if (keys->dk_size == PyDict_MINSIZE && state->keys_numfree < PyDict_MAXFREELIST) { + state->keys_free_list[state->keys_numfree++] = keys; return; } -#endif PyObject_FREE(keys); } @@ -622,16 +620,19 @@ new_dict(PyDictKeysObject *keys, PyObject **values) { PyDictObject *mp; assert(keys != NULL); -#if 
PyDict_MAXFREELIST > 0 - if (numfree) { - mp = free_list[--numfree]; + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; +#ifdef Py_DEBUG + // new_dict() must not be called after _PyDict_Fini() + assert(state->numfree != -1); +#endif + if (state->numfree) { + mp = state->free_list[--state->numfree]; assert (mp != NULL); assert (Py_IS_TYPE(mp, &PyDict_Type)); _Py_NewReference((PyObject *)mp); } - else -#endif - { + else { mp = PyObject_GC_New(PyDictObject, &PyDict_Type); if (mp == NULL) { dictkeys_decref(keys); @@ -1280,15 +1281,18 @@ dictresize(PyDictObject *mp, Py_ssize_t minsize) #ifdef Py_REF_DEBUG _Py_RefTotal--; #endif -#if PyDict_MAXFREELIST > 0 + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; +#ifdef Py_DEBUG + // dictresize() must not be called after _PyDict_Fini() + assert(state->keys_numfree != -1); +#endif if (oldkeys->dk_size == PyDict_MINSIZE && - numfreekeys < PyDict_MAXFREELIST) + state->keys_numfree < PyDict_MAXFREELIST) { - keys_free_list[numfreekeys++] = oldkeys; + state->keys_free_list[state->keys_numfree++] = oldkeys; } - else -#endif - { + else { PyObject_FREE(oldkeys); } } @@ -2028,13 +2032,16 @@ dict_dealloc(PyDictObject *mp) assert(keys->dk_refcnt == 1); dictkeys_decref(keys); } -#if PyDict_MAXFREELIST > 0 - if (numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) { - free_list[numfree++] = mp; - } - else + PyInterpreterState *interp = _PyInterpreterState_GET(); + struct _Py_dict_state *state = &interp->dict_state; +#ifdef Py_DEBUG + // new_dict() must not be called after _PyDict_Fini() + assert(state->numfree != -1); #endif - { + if (state->numfree < PyDict_MAXFREELIST && Py_IS_TYPE(mp, &PyDict_Type)) { + state->free_list[state->numfree++] = mp; + } + else { Py_TYPE(mp)->tp_free((PyObject *)mp); } Py_TRASHCAN_END diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 87f25e623f570..1b4a3db517c1d 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1258,9 +1258,7 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) if (is_main_interp) { _PySet_Fini(); } - if (is_main_interp) { - _PyDict_Fini(); - } + _PyDict_Fini(tstate); _PyList_Fini(tstate); _PyTuple_Fini(tstate); From webhook-mailer at python.org Tue Jun 23 08:08:01 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 12:08:01 -0000 Subject: [Python-checkins] bpo-40521: Make the empty frozenset per interpreter (GH-21068) Message-ID: https://github.com/python/cpython/commit/261cfedf7657a515e04428bba58eba2a9bb88208 commit: 261cfedf7657a515e04428bba58eba2a9bb88208 branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T14:07:52+02:00 summary: bpo-40521: Make the empty frozenset per interpreter (GH-21068) Each interpreter now has its own empty frozenset singleton. files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/setobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 3f64edcee983b..697d97a39e01f 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -238,6 +238,8 @@ struct _is { /* Using a cache is very effective since typically only a single slice is created and then deleted again. */ PySliceObject *slice_cache; + // The empty frozenset is a singleton. 
+ PyObject *empty_frozenset; struct _Py_tuple_state tuple; struct _Py_list_state list; diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index dc99737829772..83ce1d2e7468c 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -62,7 +62,7 @@ extern void _PyFrame_Fini(PyThreadState *tstate); extern void _PyDict_Fini(PyThreadState *tstate); extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(PyThreadState *tstate); -extern void _PySet_Fini(void); +extern void _PySet_Fini(PyThreadState *tstate); extern void _PyBytes_Fini(void); extern void _PyFloat_Fini(PyThreadState *tstate); extern void _PySlice_Fini(PyThreadState *tstate); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 3406ca8c973d8..24fd437062a51 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,5 +1,5 @@ -The tuple free lists, the empty tuple singleton, the list free list, the float -free list, the slice cache, the dict free lists, the frame free list, the -asynchronous generator free lists, and the context free list are no longer -shared by all interpreters: each interpreter now its has own free lists and -caches. +The tuple free lists, the empty tuple singleton, the list free list, the empty +frozenset singleton, the float free list, the slice cache, the dict free lists, +the frame free list, the asynchronous generator free lists, and the context +free list are no longer shared by all interpreters: each interpreter now its +has own free lists and caches. 
diff --git a/Objects/setobject.c b/Objects/setobject.c index 76b1944db4558..69bfc7d0a58fb 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -975,12 +975,11 @@ make_new_set_basetype(PyTypeObject *type, PyObject *iterable) return make_new_set(type, iterable); } -/* The empty frozenset is a singleton */ -static PyObject *emptyfrozenset = NULL; - static PyObject * make_new_frozenset(PyTypeObject *type, PyObject *iterable) { + PyObject *res; + if (type != &PyFrozenSet_Type) { return make_new_set(type, iterable); } @@ -991,7 +990,7 @@ make_new_frozenset(PyTypeObject *type, PyObject *iterable) Py_INCREF(iterable); return iterable; } - PyObject *res = make_new_set((PyTypeObject *)type, iterable); + res = make_new_set((PyTypeObject *)type, iterable); if (res == NULL || PySet_GET_SIZE(res) != 0) { return res; } @@ -1000,11 +999,17 @@ make_new_frozenset(PyTypeObject *type, PyObject *iterable) } // The empty frozenset is a singleton - if (emptyfrozenset == NULL) { - emptyfrozenset = make_new_set((PyTypeObject *)type, NULL); + PyInterpreterState *interp = _PyInterpreterState_GET(); + res = interp->empty_frozenset; + if (res == NULL) { + interp->empty_frozenset = make_new_set((PyTypeObject *)type, NULL); + res = interp->empty_frozenset; + if (res == NULL) { + return NULL; + } } - Py_XINCREF(emptyfrozenset); - return emptyfrozenset; + Py_INCREF(res); + return res; } static PyObject * @@ -2300,9 +2305,9 @@ PySet_Add(PyObject *anyset, PyObject *key) } void -_PySet_Fini(void) +_PySet_Fini(PyThreadState *tstate) { - Py_CLEAR(emptyfrozenset); + Py_CLEAR(tstate->interp->empty_frozenset); } int diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 1b4a3db517c1d..aaea0454d0084 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1255,9 +1255,7 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) _PyAsyncGen_Fini(tstate); _PyContext_Fini(tstate); - if (is_main_interp) { - _PySet_Fini(); - } + _PySet_Fini(tstate); _PyDict_Fini(tstate); _PyList_Fini(tstate); _PyTuple_Fini(tstate); From webhook-mailer at python.org Tue Jun 23 09:21:21 2020 From: webhook-mailer at python.org (WildCard65) Date: Tue, 23 Jun 2020 13:21:21 -0000 Subject: [Python-checkins] bpo-41085: Fix array.array.index() on 64-bit Windows (GH-21071) Message-ID: https://github.com/python/cpython/commit/1d3dad5f96ed445b958ec53dfa0d46812f2162d9 commit: 1d3dad5f96ed445b958ec53dfa0d46812f2162d9 branch: master author: WildCard65 committer: GitHub date: 2020-06-23T15:21:16+02:00 summary: bpo-41085: Fix array.array.index() on 64-bit Windows (GH-21071) Fix integer overflow in the :meth:`array.array.index` method on 64-bit Windows for index larger than ``2**31``. files: A Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst M Modules/arraymodule.c diff --git a/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst b/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst new file mode 100644 index 0000000000000..463dffdd653ee --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst @@ -0,0 +1,2 @@ +Fix integer overflow in the :meth:`array.array.index` method on 64-bit Windows +for index larger than ``2**31``. 
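The width mismatch behind the bug, sketched for illustration (the index value is made up; only the PyLong_FromLong to PyLong_FromSsize_t substitution comes from the patch below): 64-bit Windows is LLP64, so C long is 32 bits while Py_ssize_t is 64 bits, and casting a large index down to long mangles it.

    Py_ssize_t i = ((Py_ssize_t)1 << 31) + 5;      /* an index past 2**31 */
    PyObject *before = PyLong_FromLong((long)i);   /* old code: the (long) cast truncates i on LLP64 */
    PyObject *after = PyLong_FromSsize_t(i);       /* new code: preserves the full 64-bit index */
    Py_XDECREF(before);
    Py_XDECREF(after);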
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 8f12c61646335..2d498c7e82941 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -1130,7 +1130,7 @@ array_array_index(arrayobject *self, PyObject *v) cmp = PyObject_RichCompareBool(selfi, v, Py_EQ); Py_DECREF(selfi); if (cmp > 0) { - return PyLong_FromLong((long)i); + return PyLong_FromSsize_t(i); } else if (cmp < 0) return NULL; From webhook-mailer at python.org Tue Jun 23 09:40:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 23 Jun 2020 13:40:56 -0000 Subject: [Python-checkins] bpo-41085: Fix array.array.index() on 64-bit Windows (GH-21071) Message-ID: https://github.com/python/cpython/commit/c6e24e7420a03a1751004e255a6f6c14265b9ea1 commit: c6e24e7420a03a1751004e255a6f6c14265b9ea1 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-23T06:40:47-07:00 summary: bpo-41085: Fix array.array.index() on 64-bit Windows (GH-21071) Fix integer overflow in the :meth:`array.array.index` method on 64-bit Windows for index larger than ``2**31``. (cherry picked from commit 1d3dad5f96ed445b958ec53dfa0d46812f2162d9) Co-authored-by: WildCard65 files: A Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst M Modules/arraymodule.c diff --git a/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst b/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst new file mode 100644 index 0000000000000..463dffdd653ee --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst @@ -0,0 +1,2 @@ +Fix integer overflow in the :meth:`array.array.index` method on 64-bit Windows +for index larger than ``2**31``. diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c index 5289ea0e91a61..abcdd1e8a6e53 100644 --- a/Modules/arraymodule.c +++ b/Modules/arraymodule.c @@ -1136,7 +1136,7 @@ array_array_index(arrayobject *self, PyObject *v) cmp = PyObject_RichCompareBool(selfi, v, Py_EQ); Py_DECREF(selfi); if (cmp > 0) { - return PyLong_FromLong((long)i); + return PyLong_FromSsize_t(i); } else if (cmp < 0) return NULL; From webhook-mailer at python.org Tue Jun 23 09:50:24 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Jun 2020 13:50:24 -0000 Subject: [Python-checkins] bpo-40521: Remove freelist from collections.deque() (GH-21073) Message-ID: https://github.com/python/cpython/commit/32f2eda85957365d208f499b730d30b7eb419741 commit: 32f2eda85957365d208f499b730d30b7eb419741 branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-23T06:50:15-07:00 summary: bpo-40521: Remove freelist from collections.deque() (GH-21073) files: A Misc/NEWS.d/next/Library/2020-06-23-06-09-59.bpo-40521.HUfxP7.rst M Modules/_collectionsmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-06-23-06-09-59.bpo-40521.HUfxP7.rst b/Misc/NEWS.d/next/Library/2020-06-23-06-09-59.bpo-40521.HUfxP7.rst new file mode 100644 index 0000000000000..7689a1470b034 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-23-06-09-59.bpo-40521.HUfxP7.rst @@ -0,0 +1 @@ +Remove freelist from collections.deque(). diff --git a/Modules/_collectionsmodule.c b/Modules/_collectionsmodule.c index 7120e4dda0ed2..00198ff3eb7dd 100644 --- a/Modules/_collectionsmodule.c +++ b/Modules/_collectionsmodule.c @@ -117,23 +117,9 @@ static PyTypeObject deque_type; #define CHECK_NOT_END(link) #endif -/* A simple freelisting scheme is used to minimize calls to the memory - allocator. 
It accommodates common use cases where new blocks are being - added at about the same rate as old blocks are being freed. - */ - -#define MAXFREEBLOCKS 16 -static Py_ssize_t numfreeblocks = 0; -static block *freeblocks[MAXFREEBLOCKS]; - static block * newblock(void) { - block *b; - if (numfreeblocks) { - numfreeblocks--; - return freeblocks[numfreeblocks]; - } - b = PyMem_Malloc(sizeof(block)); + block *b = PyMem_Malloc(sizeof(block)); if (b != NULL) { return b; } @@ -144,12 +130,7 @@ newblock(void) { static void freeblock(block *b) { - if (numfreeblocks < MAXFREEBLOCKS) { - freeblocks[numfreeblocks] = b; - numfreeblocks++; - } else { - PyMem_Free(b); - } + PyMem_Free(b); } static PyObject * From webhook-mailer at python.org Tue Jun 23 09:54:45 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 13:54:45 -0000 Subject: [Python-checkins] bpo-40521: Make bytes singletons per interpreter (GH-21074) Message-ID: https://github.com/python/cpython/commit/c41eed1a874e2f22bde45c3c89418414b7a37f46 commit: c41eed1a874e2f22bde45c3c89418414b7a37f46 branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T15:54:35+02:00 summary: bpo-40521: Make bytes singletons per interpreter (GH-21074) Each interpreter now has its own empty bytes string and single byte character singletons. Replace STRINGLIB_EMPTY macro with STRINGLIB_GET_EMPTY() macro. files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/bytesobject.c M Objects/stringlib/README.txt M Objects/stringlib/asciilib.h M Objects/stringlib/partition.h M Objects/stringlib/stringdefs.h M Objects/stringlib/ucs1lib.h M Objects/stringlib/ucs2lib.h M Objects/stringlib/ucs4lib.h M Objects/stringlib/unicodedefs.h M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 697d97a39e01f..64e891f9f6eb4 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -65,6 +65,11 @@ struct _Py_unicode_fs_codec { _Py_error_handler error_handler; }; +struct _Py_bytes_state { + PyBytesObject *characters[256]; + PyBytesObject *empty_string; +}; + struct _Py_unicode_state { struct _Py_unicode_fs_codec fs_codec; }; @@ -233,6 +238,7 @@ struct _is { */ PyLongObject* small_ints[_PY_NSMALLNEGINTS + _PY_NSMALLPOSINTS]; #endif + struct _Py_bytes_state bytes; struct _Py_unicode_state unicode; struct _Py_float_state float_state; /* Using a cache is very effective since typically only a single slice is diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 83ce1d2e7468c..9a3063aa2775f 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -63,7 +63,7 @@ extern void _PyDict_Fini(PyThreadState *tstate); extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(PyThreadState *tstate); extern void _PySet_Fini(PyThreadState *tstate); -extern void _PyBytes_Fini(void); +extern void _PyBytes_Fini(PyThreadState *tstate); extern void _PyFloat_Fini(PyThreadState *tstate); extern void _PySlice_Fini(PyThreadState *tstate); extern void _PyAsyncGen_Fini(PyThreadState *tstate); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 24fd437062a51..95fab369748f0 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ 
b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,5 +1,9 @@ -The tuple free lists, the empty tuple singleton, the list free list, the empty -frozenset singleton, the float free list, the slice cache, the dict free lists, -the frame free list, the asynchronous generator free lists, and the context -free list are no longer shared by all interpreters: each interpreter now its -has own free lists and caches. +Each interpreter now its has own free lists, singletons and caches: + +* Free lists: float, tuple, list, dict, frame, context, + asynchronous generator. +* Singletons: empty tuple, empty frozenset, empty bytes string, + single byte character. +* Slice cache. + +They are no longer shared by all interpreters. diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index d39721428634f..ce006e15dce9e 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -18,9 +18,6 @@ class bytes "PyBytesObject *" "&PyBytes_Type" #include "clinic/bytesobject.c.h" -static PyBytesObject *characters[UCHAR_MAX + 1]; -static PyBytesObject *nullstring; - _Py_IDENTIFIER(__bytes__); /* PyBytesObject_SIZE gives the basic size of a string; any memory allocation @@ -35,6 +32,15 @@ _Py_IDENTIFIER(__bytes__); Py_LOCAL_INLINE(Py_ssize_t) _PyBytesWriter_GetSize(_PyBytesWriter *writer, char *str); + +static struct _Py_bytes_state* +get_bytes_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->bytes; +} + + /* For PyBytes_FromString(), the parameter `str' points to a null-terminated string containing exactly `size' bytes. @@ -63,9 +69,13 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) PyBytesObject *op; assert(size >= 0); - if (size == 0 && (op = nullstring) != NULL) { - Py_INCREF(op); - return (PyObject *)op; + if (size == 0) { + struct _Py_bytes_state *state = get_bytes_state(); + op = state->empty_string; + if (op != NULL) { + Py_INCREF(op); + return (PyObject *)op; + } } if ((size_t)size > (size_t)PY_SSIZE_T_MAX - PyBytesObject_SIZE) { @@ -88,8 +98,9 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) op->ob_sval[size] = '\0'; /* empty byte string singleton */ if (size == 0) { - nullstring = op; + struct _Py_bytes_state *state = get_bytes_state(); Py_INCREF(op); + state->empty_string = op; } return (PyObject *) op; } @@ -103,11 +114,13 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) "Negative size passed to PyBytes_FromStringAndSize"); return NULL; } - if (size == 1 && str != NULL && - (op = characters[*str & UCHAR_MAX]) != NULL) - { - Py_INCREF(op); - return (PyObject *)op; + if (size == 1 && str != NULL) { + struct _Py_bytes_state *state = get_bytes_state(); + op = state->characters[*str & UCHAR_MAX]; + if (op != NULL) { + Py_INCREF(op); + return (PyObject *)op; + } } op = (PyBytesObject *)_PyBytes_FromSize(size, 0); @@ -119,8 +132,9 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) memcpy(op->ob_sval, str, size); /* share short strings */ if (size == 1) { - characters[*str & UCHAR_MAX] = op; + struct _Py_bytes_state *state = get_bytes_state(); Py_INCREF(op); + state->characters[*str & UCHAR_MAX] = op; } return (PyObject *) op; } @@ -138,13 +152,21 @@ PyBytes_FromString(const char *str) "byte string is too long"); return NULL; } - if (size == 0 && (op = nullstring) != NULL) { - Py_INCREF(op); - return (PyObject *)op; + + struct _Py_bytes_state *state = get_bytes_state(); + if (size == 0) { + op = state->empty_string; + if (op != NULL) { + Py_INCREF(op); + return (PyObject *)op; + } } - if (size == 
1 && (op = characters[*str & UCHAR_MAX]) != NULL) { - Py_INCREF(op); - return (PyObject *)op; + else if (size == 1) { + op = state->characters[*str & UCHAR_MAX]; + if (op != NULL) { + Py_INCREF(op); + return (PyObject *)op; + } } /* Inline PyObject_NewVar */ @@ -157,11 +179,12 @@ PyBytes_FromString(const char *str) memcpy(op->ob_sval, str, size+1); /* share short strings */ if (size == 0) { - nullstring = op; Py_INCREF(op); - } else if (size == 1) { - characters[*str & UCHAR_MAX] = op; + state->empty_string = op; + } + else if (size == 1) { Py_INCREF(op); + state->characters[*str & UCHAR_MAX] = op; } return (PyObject *) op; } @@ -1249,6 +1272,8 @@ PyBytes_AsStringAndSize(PyObject *obj, /* -------------------------------------------------------------------- */ /* Methods */ +#define STRINGLIB_GET_EMPTY() get_bytes_state()->empty_string + #include "stringlib/stringdefs.h" #include "stringlib/fastsearch.h" @@ -1261,6 +1286,8 @@ PyBytes_AsStringAndSize(PyObject *obj, #include "stringlib/transmogrify.h" +#undef STRINGLIB_GET_EMPTY + PyObject * PyBytes_Repr(PyObject *obj, int smartquotes) { @@ -3058,12 +3085,13 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) } void -_PyBytes_Fini(void) +_PyBytes_Fini(PyThreadState *tstate) { - int i; - for (i = 0; i < UCHAR_MAX + 1; i++) - Py_CLEAR(characters[i]); - Py_CLEAR(nullstring); + struct _Py_bytes_state* state = &tstate->interp->bytes; + for (int i = 0; i < UCHAR_MAX + 1; i++) { + Py_CLEAR(state->characters[i]); + } + Py_CLEAR(state->empty_string); } /*********************** Bytes Iterator ****************************/ diff --git a/Objects/stringlib/README.txt b/Objects/stringlib/README.txt index 8ff6ad8c4fa0f..e1e329290acbb 100644 --- a/Objects/stringlib/README.txt +++ b/Objects/stringlib/README.txt @@ -11,10 +11,10 @@ STRINGLIB_CHAR the type used to hold a character (char or Py_UNICODE) -STRINGLIB_EMPTY +STRINGLIB_GET_EMPTY() - a PyObject representing the empty string, only to be used if - STRINGLIB_MUTABLE is 0 + returns a PyObject representing the empty string, only to be used if + STRINGLIB_MUTABLE is 0. It must not be NULL. 
Py_ssize_t STRINGLIB_LEN(PyObject*) diff --git a/Objects/stringlib/asciilib.h b/Objects/stringlib/asciilib.h index e69a2c076e3a3..8599d38a5a7f5 100644 --- a/Objects/stringlib/asciilib.h +++ b/Objects/stringlib/asciilib.h @@ -11,7 +11,7 @@ #define STRINGLIB_CHAR Py_UCS1 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_EMPTY unicode_empty +#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/partition.h b/Objects/stringlib/partition.h index ed32a6f2b382e..3731df56987fd 100644 --- a/Objects/stringlib/partition.h +++ b/Objects/stringlib/partition.h @@ -37,10 +37,12 @@ STRINGLIB(partition)(PyObject* str_obj, #else Py_INCREF(str_obj); PyTuple_SET_ITEM(out, 0, (PyObject*) str_obj); - Py_INCREF(STRINGLIB_EMPTY); - PyTuple_SET_ITEM(out, 1, (PyObject*) STRINGLIB_EMPTY); - Py_INCREF(STRINGLIB_EMPTY); - PyTuple_SET_ITEM(out, 2, (PyObject*) STRINGLIB_EMPTY); + PyObject *empty = (PyObject*)STRINGLIB_GET_EMPTY(); + assert(empty != NULL); + Py_INCREF(empty); + PyTuple_SET_ITEM(out, 1, empty); + Py_INCREF(empty); + PyTuple_SET_ITEM(out, 2, empty); #endif return out; } @@ -90,10 +92,12 @@ STRINGLIB(rpartition)(PyObject* str_obj, return NULL; } #else - Py_INCREF(STRINGLIB_EMPTY); - PyTuple_SET_ITEM(out, 0, (PyObject*) STRINGLIB_EMPTY); - Py_INCREF(STRINGLIB_EMPTY); - PyTuple_SET_ITEM(out, 1, (PyObject*) STRINGLIB_EMPTY); + PyObject *empty = (PyObject*)STRINGLIB_GET_EMPTY(); + assert(empty != NULL); + Py_INCREF(empty); + PyTuple_SET_ITEM(out, 0, empty); + Py_INCREF(empty); + PyTuple_SET_ITEM(out, 1, empty); Py_INCREF(str_obj); PyTuple_SET_ITEM(out, 2, (PyObject*) str_obj); #endif diff --git a/Objects/stringlib/stringdefs.h b/Objects/stringlib/stringdefs.h index ce27f3e4081f9..c12ecc59e5c6d 100644 --- a/Objects/stringlib/stringdefs.h +++ b/Objects/stringlib/stringdefs.h @@ -1,6 +1,10 @@ #ifndef STRINGLIB_STRINGDEFS_H #define STRINGLIB_STRINGDEFS_H +#ifndef STRINGLIB_GET_EMPTY +# error "STRINGLIB_GET_EMPTY macro must be defined" +#endif + /* this is sort of a hack. there's at least one place (formatting floats) where some stringlib code takes a different path if it's compiled as unicode. 
*/ @@ -13,7 +17,6 @@ #define STRINGLIB_CHAR char #define STRINGLIB_TYPE_NAME "string" #define STRINGLIB_PARSE_CODE "S" -#define STRINGLIB_EMPTY nullstring #define STRINGLIB_ISSPACE Py_ISSPACE #define STRINGLIB_ISLINEBREAK(x) ((x == '\n') || (x == '\r')) #define STRINGLIB_ISDECIMAL(x) ((x >= '0') && (x <= '9')) diff --git a/Objects/stringlib/ucs1lib.h b/Objects/stringlib/ucs1lib.h index bc4b104f112cc..bdf30356b8457 100644 --- a/Objects/stringlib/ucs1lib.h +++ b/Objects/stringlib/ucs1lib.h @@ -11,7 +11,7 @@ #define STRINGLIB_CHAR Py_UCS1 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_EMPTY unicode_empty +#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/ucs2lib.h b/Objects/stringlib/ucs2lib.h index 86a1dff1b5637..9d6888801867d 100644 --- a/Objects/stringlib/ucs2lib.h +++ b/Objects/stringlib/ucs2lib.h @@ -11,7 +11,7 @@ #define STRINGLIB_CHAR Py_UCS2 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_EMPTY unicode_empty +#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/ucs4lib.h b/Objects/stringlib/ucs4lib.h index 3c32a93c96a1c..c7dfa527433e3 100644 --- a/Objects/stringlib/ucs4lib.h +++ b/Objects/stringlib/ucs4lib.h @@ -11,7 +11,7 @@ #define STRINGLIB_CHAR Py_UCS4 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_EMPTY unicode_empty +#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/unicodedefs.h b/Objects/stringlib/unicodedefs.h index 3db5629e11f12..e4d4163afc2f9 100644 --- a/Objects/stringlib/unicodedefs.h +++ b/Objects/stringlib/unicodedefs.h @@ -13,7 +13,7 @@ #define STRINGLIB_CHAR Py_UNICODE #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_EMPTY unicode_empty +#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index aaea0454d0084..4bb32abc4be1f 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1262,9 +1262,7 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) _PySlice_Fini(tstate); - if (is_main_interp) { - _PyBytes_Fini(); - } + _PyBytes_Fini(tstate); _PyUnicode_Fini(tstate); _PyFloat_Fini(tstate); _PyLong_Fini(tstate); From webhook-mailer at python.org Tue Jun 23 09:55:16 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 13:55:16 -0000 Subject: [Python-checkins] bpo-36710: Pass tstate explicitly in abstract.c (GH-21075) Message-ID: https://github.com/python/cpython/commit/61b649296110dcb38e67db382a7e8427c5eb75d0 commit: 61b649296110dcb38e67db382a7e8427c5eb75d0 branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T15:55:06+02:00 summary: bpo-36710: Pass tstate explicitly in abstract.c (GH-21075) In functions calling more than one PyErr function, get tstate and then pass it explicitly. 
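The recurring pattern, distilled from the hunks that follow: fetch the thread state once with _PyThreadState_GET() and call the _PyErr_* variants that take it explicitly, instead of letting every PyErr_* call re-derive the thread state. A representative helper from the patch:

    static PyObject *
    null_error(void)
    {
        PyThreadState *tstate = _PyThreadState_GET();
        if (!_PyErr_Occurred(tstate)) {
            _PyErr_SetString(tstate, PyExc_SystemError,
                             "null argument to internal routine");
        }
        return NULL;
    }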
files: M Objects/abstract.c diff --git a/Objects/abstract.c b/Objects/abstract.c index aac42c2898cdf..b9e7111299e2f 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -3,7 +3,7 @@ #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() #include "pycore_ceval.h" // _Py_EnterRecursiveCall() -#include "pycore_pyerrors.h" +#include "pycore_pyerrors.h" // _PyErr_Occurred() #include "pycore_pystate.h" // _PyThreadState_GET() #include #include // offsetof() @@ -23,9 +23,11 @@ type_error(const char *msg, PyObject *obj) static PyObject * null_error(void) { - if (!PyErr_Occurred()) - PyErr_SetString(PyExc_SystemError, - "null argument to internal routine"); + PyThreadState *tstate = _PyThreadState_GET(); + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_SystemError, + "null argument to internal routine"); + } return NULL; } @@ -94,11 +96,12 @@ PyObject_LengthHint(PyObject *o, Py_ssize_t defaultvalue) if (_PyObject_HasLen(o)) { res = PyObject_Length(o); if (res < 0) { - assert(PyErr_Occurred()); - if (!PyErr_ExceptionMatches(PyExc_TypeError)) { + PyThreadState *tstate = _PyThreadState_GET(); + assert(_PyErr_Occurred(tstate)); + if (!_PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { return -1; } - PyErr_Clear(); + _PyErr_Clear(tstate); } else { return res; @@ -114,8 +117,9 @@ PyObject_LengthHint(PyObject *o, Py_ssize_t defaultvalue) result = _PyObject_CallNoArg(hint); Py_DECREF(hint); if (result == NULL) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) { - PyErr_Clear(); + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { + _PyErr_Clear(tstate); return defaultvalue; } return -1; @@ -708,7 +712,7 @@ PyBuffer_FillInfo(Py_buffer *view, PyObject *obj, void *buf, Py_ssize_t len, { if (view == NULL) { PyErr_SetString(PyExc_BufferError, - "PyBuffer_FillInfo: view==NULL argument is obsolete"); + "PyBuffer_FillInfo: view==NULL argument is obsolete"); return -1; } @@ -790,10 +794,12 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) /* Find the (unbound!) __format__ method */ meth = _PyObject_LookupSpecial(obj, &PyId___format__); if (meth == NULL) { - if (!PyErr_Occurred()) - PyErr_Format(PyExc_TypeError, - "Type %.100s doesn't define __format__", - Py_TYPE(obj)->tp_name); + PyThreadState *tstate = _PyThreadState_GET(); + if (!_PyErr_Occurred(tstate)) { + _PyErr_Format(tstate, PyExc_TypeError, + "Type %.100s doesn't define __format__", + Py_TYPE(obj)->tp_name); + } goto done; } @@ -803,8 +809,8 @@ PyObject_Format(PyObject *obj, PyObject *format_spec) if (result && !PyUnicode_Check(result)) { PyErr_Format(PyExc_TypeError, - "__format__ must return a str, not %.200s", - Py_TYPE(result)->tp_name); + "__format__ must return a str, not %.200s", + Py_TYPE(result)->tp_name); Py_DECREF(result); result = NULL; goto done; @@ -1388,17 +1394,23 @@ PyNumber_AsSsize_t(PyObject *item, PyObject *err) /* We're done if PyLong_AsSsize_t() returns without error. */ result = PyLong_AsSsize_t(value); - if (result != -1 || !(runerr = PyErr_Occurred())) + if (result != -1) + goto finish; + + PyThreadState *tstate = _PyThreadState_GET(); + runerr = _PyErr_Occurred(tstate); + if (!runerr) { goto finish; + } /* Error handling code -- only manage OverflowError differently */ - if (!PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) + if (!PyErr_GivenExceptionMatches(runerr, PyExc_OverflowError)) { goto finish; + } + _PyErr_Clear(tstate); - PyErr_Clear(); /* If no error-handling desired then the default clipping - is sufficient. 
- */ + is sufficient. */ if (!err) { assert(PyLong_Check(value)); /* Whether or not it is less than or equal to @@ -1411,9 +1423,9 @@ PyNumber_AsSsize_t(PyObject *item, PyObject *err) } else { /* Otherwise replace the error with caller's error object. */ - PyErr_Format(err, - "cannot fit '%.200s' into an index-sized integer", - Py_TYPE(item)->tp_name); + _PyErr_Format(tstate, err, + "cannot fit '%.200s' into an index-sized integer", + Py_TYPE(item)->tp_name); } finish: @@ -1448,8 +1460,8 @@ PyNumber_Long(PyObject *o) return result; if (!PyLong_Check(result)) { PyErr_Format(PyExc_TypeError, - "__int__ returned non-int (type %.200s)", - result->ob_type->tp_name); + "__int__ returned non-int (type %.200s)", + result->ob_type->tp_name); Py_DECREF(result); return NULL; } @@ -2052,8 +2064,10 @@ PySequence_Fast(PyObject *v, const char *m) it = PyObject_GetIter(v); if (it == NULL) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) - PyErr_SetString(PyExc_TypeError, m); + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { + _PyErr_SetString(tstate, PyExc_TypeError, m); + } return NULL; } @@ -2310,12 +2324,13 @@ method_output_as_list(PyObject *o, _Py_Identifier *meth_id) } it = PyObject_GetIter(meth_output); if (it == NULL) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) { - PyErr_Format(PyExc_TypeError, - "%.200s.%U() returned a non-iterable (type %.200s)", - Py_TYPE(o)->tp_name, - _PyUnicode_FromId(meth_id), - Py_TYPE(meth_output)->tp_name); + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyErr_ExceptionMatches(tstate, PyExc_TypeError)) { + _PyErr_Format(tstate, PyExc_TypeError, + "%.200s.%U() returned a non-iterable (type %.200s)", + Py_TYPE(o)->tp_name, + _PyUnicode_FromId(meth_id), + Py_TYPE(meth_output)->tp_name); } Py_DECREF(meth_output); return NULL; @@ -2460,8 +2475,10 @@ check_class(PyObject *cls, const char *error) PyObject *bases = abstract_get_bases(cls); if (bases == NULL) { /* Do not mask errors. */ - if (!PyErr_Occurred()) - PyErr_SetString(PyExc_TypeError, error); + PyThreadState *tstate = _PyThreadState_GET(); + if (!_PyErr_Occurred(tstate)) { + _PyErr_SetString(tstate, PyExc_TypeError, error); + } return 0; } Py_DECREF(bases); @@ -2719,10 +2736,14 @@ PyIter_Next(PyObject *iter) { PyObject *result; result = (*Py_TYPE(iter)->tp_iternext)(iter); - if (result == NULL && - PyErr_Occurred() && - PyErr_ExceptionMatches(PyExc_StopIteration)) - PyErr_Clear(); + if (result == NULL) { + PyThreadState *tstate = _PyThreadState_GET(); + if (_PyErr_Occurred(tstate) + && _PyErr_ExceptionMatches(tstate, PyExc_StopIteration)) + { + _PyErr_Clear(tstate); + } + } return result; } From webhook-mailer at python.org Tue Jun 23 09:55:54 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 13:55:54 -0000 Subject: [Python-checkins] Call _PyWarnings_InitState() in subinterpreters (GH-21078) Message-ID: https://github.com/python/cpython/commit/30a89338eb377df08ea8724809101d14612a32eb commit: 30a89338eb377df08ea8724809101d14612a32eb branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T15:55:45+02:00 summary: Call _PyWarnings_InitState() in subinterpreters (GH-21078) Py_InitializeFromConfig() now calls also _PyWarnings_InitState() in subinterpreters. 
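A hedged usage sketch, not part of the commit: assuming an embedding application that has already called Py_Initialize() and holds the main thread state, a newly created subinterpreter now gets its warnings state set up during startup, so warnings can be raised there right away. The calls below are the standard public embedding API; structuring a check this way is an assumption, not something the patch adds.

    PyThreadState *main_tstate = PyThreadState_Get();
    PyThreadState *sub = Py_NewInterpreter();
    if (sub != NULL) {
        /* runs in the freshly initialized subinterpreter */
        PyRun_SimpleString("import warnings; warnings.warn('from a subinterpreter')");
        Py_EndInterpreter(sub);   /* sub must be the current thread state here */
    }
    PyThreadState_Swap(main_tstate);   /* switch back to the main interpreter */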
files: M Python/pylifecycle.c diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 4bb32abc4be1f..f0770727f4de7 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -685,24 +685,22 @@ pycore_init_import_warnings(PyThreadState *tstate, PyObject *sysmod) return status; } - const PyConfig *config = _PyInterpreterState_GetConfig(tstate->interp); - if (_Py_IsMainInterpreter(tstate)) { - /* Initialize _warnings. */ - status = _PyWarnings_InitState(tstate); - if (_PyStatus_EXCEPTION(status)) { - return status; - } + /* Initialize _warnings. */ + status = _PyWarnings_InitState(tstate); + if (_PyStatus_EXCEPTION(status)) { + return status; + } - if (config->_install_importlib) { + const PyConfig *config = _PyInterpreterState_GetConfig(tstate->interp); + if (config->_install_importlib) { + if (_Py_IsMainInterpreter(tstate)) { status = _PyConfig_WritePathConfig(config); if (_PyStatus_EXCEPTION(status)) { return status; } } - } - /* This call sets up builtin and frozen import support */ - if (config->_install_importlib) { + /* This call sets up builtin and frozen import support */ status = init_importlib(tstate, sysmod); if (_PyStatus_EXCEPTION(status)) { return status; From webhook-mailer at python.org Tue Jun 23 10:21:36 2020 From: webhook-mailer at python.org (Ram Rachum) Date: Tue, 23 Jun 2020 14:21:36 -0000 Subject: [Python-checkins] bpo-41065: Use zip-strict in zoneinfo (GH-21031) Message-ID: https://github.com/python/cpython/commit/bc43f6e21244f31d25896875430174cd4ac7604c commit: bc43f6e21244f31d25896875430174cd4ac7604c branch: master author: Ram Rachum committer: GitHub date: 2020-06-23T10:21:26-04:00 summary: bpo-41065: Use zip-strict in zoneinfo (GH-21031) files: M Lib/zoneinfo/_common.py diff --git a/Lib/zoneinfo/_common.py b/Lib/zoneinfo/_common.py index 41c898f37e4f8..4c24f01bd7b27 100644 --- a/Lib/zoneinfo/_common.py +++ b/Lib/zoneinfo/_common.py @@ -136,8 +136,7 @@ class _TZifHeader: ] def __init__(self, *args): - assert len(self.__slots__) == len(args) - for attr, val in zip(self.__slots__, args): + for attr, val in zip(self.__slots__, args, strict=True): setattr(self, attr, val) @classmethod From webhook-mailer at python.org Tue Jun 23 10:40:50 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 14:40:50 -0000 Subject: [Python-checkins] bpo-40521: Cleanup code of free lists (GH-21082) Message-ID: https://github.com/python/cpython/commit/522691c46e2ae51faaad5bbbce7d959dd61770df commit: 522691c46e2ae51faaad5bbbce7d959dd61770df branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T16:40:40+02:00 summary: bpo-40521: Cleanup code of free lists (GH-21082) Add get_xxx_state() function to factorize duplicated code. 
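The factored-out accessor looks the same for every object type; shown once here for floats (taken from the floatobject.c hunk below), with matching helpers added for the dict, frame, async generator, list, tuple and context state:

    static struct _Py_float_state *
    get_float_state(void)
    {
        PyInterpreterState *interp = _PyInterpreterState_GET();
        return &interp->float_state;
    }

Call sites such as PyFloat_FromDouble() and float_dealloc() then ask this helper for the per-interpreter state instead of repeating the _PyInterpreterState_GET() boilerplate at each use.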
files: M Objects/dictobject.c M Objects/floatobject.c M Objects/frameobject.c M Objects/genobject.c M Objects/listobject.c M Objects/sliceobject.c M Objects/tupleobject.c M Python/context.c diff --git a/Objects/dictobject.c b/Objects/dictobject.c index f3b1157177655..b1f11b3e695bb 100644 --- a/Objects/dictobject.c +++ b/Objects/dictobject.c @@ -249,6 +249,15 @@ static uint64_t pydict_global_version = 0; #include "clinic/dictobject.c.h" + +static struct _Py_dict_state * +get_dict_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->dict_state; +} + + void _PyDict_ClearFreeList(PyThreadState *tstate) { @@ -269,8 +278,7 @@ _PyDict_Fini(PyThreadState *tstate) { _PyDict_ClearFreeList(tstate); #ifdef Py_DEBUG - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); state->numfree = -1; state->keys_numfree = -1; #endif @@ -281,8 +289,7 @@ _PyDict_Fini(PyThreadState *tstate) void _PyDict_DebugMallocStats(FILE *out) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); _PyDebugAllocatorStats(out, "free PyDictObject", state->numfree, sizeof(PyDictObject)); } @@ -557,8 +564,7 @@ new_keys_object(Py_ssize_t size) es = sizeof(Py_ssize_t); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); #ifdef Py_DEBUG // new_keys_object() must not be called after _PyDict_Fini() assert(state->keys_numfree != -1); @@ -598,8 +604,7 @@ free_keys_object(PyDictKeysObject *keys) Py_XDECREF(entries[i].me_key); Py_XDECREF(entries[i].me_value); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); #ifdef Py_DEBUG // free_keys_object() must not be called after _PyDict_Fini() assert(state->keys_numfree != -1); @@ -620,8 +625,7 @@ new_dict(PyDictKeysObject *keys, PyObject **values) { PyDictObject *mp; assert(keys != NULL); - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); #ifdef Py_DEBUG // new_dict() must not be called after _PyDict_Fini() assert(state->numfree != -1); @@ -1281,8 +1285,7 @@ dictresize(PyDictObject *mp, Py_ssize_t minsize) #ifdef Py_REF_DEBUG _Py_RefTotal--; #endif - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); #ifdef Py_DEBUG // dictresize() must not be called after _PyDict_Fini() assert(state->keys_numfree != -1); @@ -2032,8 +2035,7 @@ dict_dealloc(PyDictObject *mp) assert(keys->dk_refcnt == 1); dictkeys_decref(keys); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_dict_state *state = &interp->dict_state; + struct _Py_dict_state *state = get_dict_state(); #ifdef Py_DEBUG // new_dict() must not be called after _PyDict_Fini() assert(state->numfree != -1); diff --git a/Objects/floatobject.c b/Objects/floatobject.c index 7ffd7eebe5a45..0606f29ff5408 100644 --- a/Objects/floatobject.c +++ b/Objects/floatobject.c @@ -23,6 +23,15 @@ class float "PyObject *" "&PyFloat_Type" # define PyFloat_MAXFREELIST 100 #endif + +static struct _Py_float_state * +get_float_state(void) +{ + PyInterpreterState 
*interp = _PyInterpreterState_GET(); + return &interp->float_state; +} + + double PyFloat_GetMax(void) { @@ -113,8 +122,7 @@ PyFloat_GetInfo(void) PyObject * PyFloat_FromDouble(double fval) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_float_state *state = &interp->float_state; + struct _Py_float_state *state = get_float_state(); PyFloatObject *op = state->free_list; if (op != NULL) { #ifdef Py_DEBUG @@ -222,8 +230,7 @@ static void float_dealloc(PyFloatObject *op) { if (PyFloat_CheckExact(op)) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_float_state *state = &interp->float_state; + struct _Py_float_state *state = get_float_state(); #ifdef Py_DEBUG // float_dealloc() must not be called after _PyFloat_Fini() assert(state->numfree != -1); @@ -236,8 +243,9 @@ float_dealloc(PyFloatObject *op) Py_SET_TYPE(op, (PyTypeObject *)state->free_list); state->free_list = op; } - else + else { Py_TYPE(op)->tp_free((PyObject *)op); + } } double @@ -2017,8 +2025,7 @@ _PyFloat_Fini(PyThreadState *tstate) void _PyFloat_DebugMallocStats(FILE *out) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_float_state *state = &interp->float_state; + struct _Py_float_state *state = get_float_state(); _PyDebugAllocatorStats(out, "free PyFloatObject", state->numfree, sizeof(PyFloatObject)); diff --git a/Objects/frameobject.c b/Objects/frameobject.c index 6e1cbcfaf6f51..7c2bce3615860 100644 --- a/Objects/frameobject.c +++ b/Objects/frameobject.c @@ -22,6 +22,15 @@ static PyMemberDef frame_memberlist[] = { {NULL} /* Sentinel */ }; + +static struct _Py_frame_state * +get_frame_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->frame; +} + + static PyObject * frame_getlocals(PyFrameObject *f, void *closure) { @@ -593,8 +602,7 @@ frame_dealloc(PyFrameObject *f) co->co_zombieframe = f; } else { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_frame_state *state = &interp->frame; + struct _Py_frame_state *state = get_frame_state(); #ifdef Py_DEBUG // frame_dealloc() must not be called after _PyFrame_Fini() assert(state->numfree != -1); @@ -784,8 +792,7 @@ frame_alloc(PyCodeObject *code) Py_ssize_t ncells = PyTuple_GET_SIZE(code->co_cellvars); Py_ssize_t nfrees = PyTuple_GET_SIZE(code->co_freevars); Py_ssize_t extras = code->co_stacksize + code->co_nlocals + ncells + nfrees; - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_frame_state *state = &interp->frame; + struct _Py_frame_state *state = get_frame_state(); if (state->free_list == NULL) { f = PyObject_GC_NewVar(PyFrameObject, &PyFrame_Type, extras); @@ -1206,8 +1213,7 @@ _PyFrame_Fini(PyThreadState *tstate) void _PyFrame_DebugMallocStats(FILE *out) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_frame_state *state = &interp->frame; + struct _Py_frame_state *state = get_frame_state(); _PyDebugAllocatorStats(out, "free PyFrameObject", state->numfree, sizeof(PyFrameObject)); diff --git a/Objects/genobject.c b/Objects/genobject.c index 4207d5326cca1..6a68c9484a6ae 100644 --- a/Objects/genobject.c +++ b/Objects/genobject.c @@ -1389,6 +1389,14 @@ PyTypeObject PyAsyncGen_Type = { }; +static struct _Py_async_gen_state * +get_async_gen_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->async_gen; +} + + PyObject * PyAsyncGen_New(PyFrameObject *f, PyObject *name, PyObject *qualname) { @@ -1477,8 +1485,7 @@ async_gen_asend_dealloc(PyAsyncGenASend *o) 
_PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->ags_gen); Py_CLEAR(o->ags_sendval); - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_async_gen_state *state = &interp->async_gen; + struct _Py_async_gen_state *state = get_async_gen_state(); #ifdef Py_DEBUG // async_gen_asend_dealloc() must not be called after _PyAsyncGen_Fini() assert(state->asend_numfree != -1); @@ -1639,8 +1646,7 @@ static PyObject * async_gen_asend_new(PyAsyncGenObject *gen, PyObject *sendval) { PyAsyncGenASend *o; - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_async_gen_state *state = &interp->async_gen; + struct _Py_async_gen_state *state = get_async_gen_state(); #ifdef Py_DEBUG // async_gen_asend_new() must not be called after _PyAsyncGen_Fini() assert(state->asend_numfree != -1); @@ -1678,8 +1684,7 @@ async_gen_wrapped_val_dealloc(_PyAsyncGenWrappedValue *o) { _PyObject_GC_UNTRACK((PyObject *)o); Py_CLEAR(o->agw_val); - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_async_gen_state *state = &interp->async_gen; + struct _Py_async_gen_state *state = get_async_gen_state(); #ifdef Py_DEBUG // async_gen_wrapped_val_dealloc() must not be called after _PyAsyncGen_Fini() assert(state->value_numfree != -1); @@ -1752,8 +1757,7 @@ _PyAsyncGenValueWrapperNew(PyObject *val) _PyAsyncGenWrappedValue *o; assert(val); - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_async_gen_state *state = &interp->async_gen; + struct _Py_async_gen_state *state = get_async_gen_state(); #ifdef Py_DEBUG // _PyAsyncGenValueWrapperNew() must not be called after _PyAsyncGen_Fini() assert(state->value_numfree != -1); diff --git a/Objects/listobject.c b/Objects/listobject.c index 261a0fdfffae0..ab036154eacc2 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -19,6 +19,15 @@ class list "PyListObject *" "&PyList_Type" #include "clinic/listobject.c.h" + +static struct _Py_list_state * +get_list_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->list; +} + + /* Ensure ob_item has room for at least newsize elements, and set * ob_size to newsize. 
If newsize > ob_size on entry, the content * of the new slots at exit is undefined heap trash; it's the caller's @@ -121,8 +130,7 @@ _PyList_Fini(PyThreadState *tstate) void _PyList_DebugMallocStats(FILE *out) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_list_state *state = &interp->list; + struct _Py_list_state *state = get_list_state(); _PyDebugAllocatorStats(out, "free PyListObject", state->numfree, sizeof(PyListObject)); @@ -136,8 +144,7 @@ PyList_New(Py_ssize_t size) return NULL; } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_list_state *state = &interp->list; + struct _Py_list_state *state = get_list_state(); PyListObject *op; #ifdef Py_DEBUG // PyList_New() must not be called after _PyList_Fini() @@ -336,8 +343,7 @@ list_dealloc(PyListObject *op) } PyMem_FREE(op->ob_item); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_list_state *state = &interp->list; + struct _Py_list_state *state = get_list_state(); #ifdef Py_DEBUG // list_dealloc() must not be called after _PyList_Fini() assert(state->numfree != -1); diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c index f97a570a787f0..e8af623142b84 100644 --- a/Objects/sliceobject.c +++ b/Objects/sliceobject.c @@ -113,27 +113,35 @@ void _PySlice_Fini(PyThreadState *tstate) PyObject * PySlice_New(PyObject *start, PyObject *stop, PyObject *step) { + if (step == NULL) { + step = Py_None; + } + if (start == NULL) { + start = Py_None; + } + if (stop == NULL) { + stop = Py_None; + } + PyInterpreterState *interp = _PyInterpreterState_GET(); PySliceObject *obj; if (interp->slice_cache != NULL) { obj = interp->slice_cache; interp->slice_cache = NULL; _Py_NewReference((PyObject *)obj); - } else { + } + else { obj = PyObject_GC_New(PySliceObject, &PySlice_Type); - if (obj == NULL) + if (obj == NULL) { return NULL; + } } - if (step == NULL) step = Py_None; Py_INCREF(step); - if (start == NULL) start = Py_None; - Py_INCREF(start); - if (stop == NULL) stop = Py_None; - Py_INCREF(stop); - obj->step = step; + Py_INCREF(start); obj->start = start; + Py_INCREF(stop); obj->stop = stop; _PyObject_GC_TRACK(obj); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index 2ff4c48111fe0..f4f9aa259e8b2 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -14,19 +14,28 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #include "clinic/tupleobject.c.h" + +static struct _Py_tuple_state * +get_tuple_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->tuple; +} + + static inline void tuple_gc_track(PyTupleObject *op) { _PyObject_GC_TRACK(op); } + /* Print summary info about the state of the optimized allocator */ void _PyTuple_DebugMallocStats(FILE *out) { #if PyTuple_MAXSAVESIZE > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); for (int i = 1; i < PyTuple_MAXSAVESIZE; i++) { char buf[128]; PyOS_snprintf(buf, sizeof(buf), @@ -89,8 +98,7 @@ PyTuple_New(Py_ssize_t size) { PyTupleObject *op; #if PyTuple_MAXSAVESIZE > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); if (size == 0 && state->free_list[0]) { op = state->free_list[0]; Py_INCREF(op); @@ -198,8 +206,7 @@ PyTuple_Pack(Py_ssize_t n, ...) 
return PyTuple_New(0); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); va_start(vargs, n); PyTupleObject *result = tuple_alloc(state, n); @@ -233,8 +240,7 @@ tupledealloc(PyTupleObject *op) Py_XDECREF(op->ob_item[i]); } #if PyTuple_MAXSAVESIZE > 0 - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); #ifdef Py_DEBUG // tupledealloc() must not be called after _PyTuple_Fini() assert(state->numfree[0] != -1); @@ -420,8 +426,7 @@ _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) return PyTuple_New(0); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); PyTupleObject *tuple = tuple_alloc(state, n); if (tuple == NULL) { return NULL; @@ -492,8 +497,7 @@ tupleconcat(PyTupleObject *a, PyObject *bb) return PyTuple_New(0); } - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); np = tuple_alloc(state, size); if (np == NULL) { return NULL; @@ -537,8 +541,7 @@ tuplerepeat(PyTupleObject *a, Py_ssize_t n) if (n > PY_SSIZE_T_MAX / Py_SIZE(a)) return PyErr_NoMemory(); size = Py_SIZE(a) * n; - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); np = tuple_alloc(state, size); if (np == NULL) return NULL; @@ -804,8 +807,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) return (PyObject *)self; } else { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_tuple_state *state = &interp->tuple; + struct _Py_tuple_state *state = get_tuple_state(); PyTupleObject* result = tuple_alloc(state, slicelength); if (!result) return NULL; diff --git a/Python/context.c b/Python/context.c index dedbca99384c7..dc34071884750 100644 --- a/Python/context.c +++ b/Python/context.c @@ -66,6 +66,14 @@ static int contextvar_del(PyContextVar *var); +static struct _Py_context_state * +get_context_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->context; +} + + PyObject * _PyContext_NewHamtForTests(void) { @@ -332,8 +340,7 @@ class _contextvars.Context "PyContext *" "&PyContext_Type" static inline PyContext * _context_alloc(void) { - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_context_state *state = &interp->context; + struct _Py_context_state *state = get_context_state(); PyContext *ctx; #ifdef Py_DEBUG // _context_alloc() must not be called after _PyContext_Fini() @@ -462,8 +469,7 @@ context_tp_dealloc(PyContext *self) } (void)context_tp_clear(self); - PyInterpreterState *interp = _PyInterpreterState_GET(); - struct _Py_context_state *state = &interp->context; + struct _Py_context_state *state = get_context_state(); #ifdef Py_DEBUG // _context_alloc() must not be called after _PyContext_Fini() assert(state->numfree != -1); From webhook-mailer at python.org Tue Jun 23 11:43:04 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Jun 2020 15:43:04 -0000 Subject: [Python-checkins] bpo-40521: Empty frozenset is no longer a singleton (GH-21085) Message-ID: https://github.com/python/cpython/commit/f9bd05e83e32bece49de5af0c9a232325c57648a commit: f9bd05e83e32bece49de5af0c9a232325c57648a 
branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-23T17:42:55+02:00 summary: bpo-40521: Empty frozenset is no longer a singleton (GH-21085) * Revert "bpo-40521: Make the empty frozenset per interpreter (GH-21068)" This reverts commit 261cfedf7657a515e04428bba58eba2a9bb88208. * bpo-40521: Empty frozensets are no longer singletons * Complete the removal of the frozenset singleton files: A Misc/NEWS.d/next/Core and Builtins/2020-06-23-07-35-11.bpo-40521.dMNA6k.rst M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Lib/test/test_marshal.py M Lib/test/test_set.py M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/setobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 64e891f9f6eb4..c22bea75d2795 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -244,8 +244,6 @@ struct _is { /* Using a cache is very effective since typically only a single slice is created and then deleted again. */ PySliceObject *slice_cache; - // The empty frozenset is a singleton. - PyObject *empty_frozenset; struct _Py_tuple_state tuple; struct _Py_list_state list; diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 9a3063aa2775f..30ba48423f9ec 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -62,7 +62,6 @@ extern void _PyFrame_Fini(PyThreadState *tstate); extern void _PyDict_Fini(PyThreadState *tstate); extern void _PyTuple_Fini(PyThreadState *tstate); extern void _PyList_Fini(PyThreadState *tstate); -extern void _PySet_Fini(PyThreadState *tstate); extern void _PyBytes_Fini(PyThreadState *tstate); extern void _PyFloat_Fini(PyThreadState *tstate); extern void _PySlice_Fini(PyThreadState *tstate); diff --git a/Lib/test/test_marshal.py b/Lib/test/test_marshal.py index ace1593999d4e..b7f4dbb98e36d 100644 --- a/Lib/test/test_marshal.py +++ b/Lib/test/test_marshal.py @@ -158,13 +158,6 @@ def test_sets(self): for constructor in (set, frozenset): self.helper(constructor(self.d.keys())) - @support.cpython_only - def test_empty_frozenset_singleton(self): - # marshal.loads() must reuse the empty frozenset singleton - obj = frozenset() - obj2 = marshal.loads(marshal.dumps(obj)) - self.assertIs(obj2, obj) - class BufferTestCase(unittest.TestCase, HelperMixin): diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py index 9851a998983f8..68d494213e587 100644 --- a/Lib/test/test_set.py +++ b/Lib/test/test_set.py @@ -661,15 +661,6 @@ def test_init(self): s.__init__(self.otherword) self.assertEqual(s, set(self.word)) - def test_singleton_empty_frozenset(self): - f = frozenset() - efs = [frozenset(), frozenset([]), frozenset(()), frozenset(''), - frozenset(), frozenset([]), frozenset(()), frozenset(''), - frozenset(range(0)), frozenset(frozenset()), - frozenset(f), f] - # All of the empty frozensets should have just one id() - self.assertEqual(len(set(map(id, efs))), 1) - def test_constructor_identity(self): s = self.thetype(range(3)) t = self.thetype(s) diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 95fab369748f0..a62383d2093ec 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -2,8 +2,9 @@ Each interpreter now its 
has own free lists, singletons and caches: * Free lists: float, tuple, list, dict, frame, context, asynchronous generator. -* Singletons: empty tuple, empty frozenset, empty bytes string, +* Singletons: empty tuple, empty bytes string, single byte character. * Slice cache. They are no longer shared by all interpreters. + diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-07-35-11.bpo-40521.dMNA6k.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-07-35-11.bpo-40521.dMNA6k.rst new file mode 100644 index 0000000000000..25f146e35ef43 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-07-35-11.bpo-40521.dMNA6k.rst @@ -0,0 +1 @@ +Empty frozensets are no longer singletons. diff --git a/Objects/setobject.c b/Objects/setobject.c index 69bfc7d0a58fb..b2711495b657b 100644 --- a/Objects/setobject.c +++ b/Objects/setobject.c @@ -978,38 +978,16 @@ make_new_set_basetype(PyTypeObject *type, PyObject *iterable) static PyObject * make_new_frozenset(PyTypeObject *type, PyObject *iterable) { - PyObject *res; - if (type != &PyFrozenSet_Type) { return make_new_set(type, iterable); } - if (iterable != NULL) { - if (PyFrozenSet_CheckExact(iterable)) { - /* frozenset(f) is idempotent */ - Py_INCREF(iterable); - return iterable; - } - res = make_new_set((PyTypeObject *)type, iterable); - if (res == NULL || PySet_GET_SIZE(res) != 0) { - return res; - } - /* If the created frozenset is empty, return the empty frozenset singleton instead */ - Py_DECREF(res); + if (iterable != NULL && PyFrozenSet_CheckExact(iterable)) { + /* frozenset(f) is idempotent */ + Py_INCREF(iterable); + return iterable; } - - // The empty frozenset is a singleton - PyInterpreterState *interp = _PyInterpreterState_GET(); - res = interp->empty_frozenset; - if (res == NULL) { - interp->empty_frozenset = make_new_set((PyTypeObject *)type, NULL); - res = interp->empty_frozenset; - if (res == NULL) { - return NULL; - } - } - Py_INCREF(res); - return res; + return make_new_set((PyTypeObject *)type, iterable); } static PyObject * @@ -2304,12 +2282,6 @@ PySet_Add(PyObject *anyset, PyObject *key) return set_add_key((PySetObject *)anyset, key); } -void -_PySet_Fini(PyThreadState *tstate) -{ - Py_CLEAR(tstate->interp->empty_frozenset); -} - int _PySet_NextEntry(PyObject *set, Py_ssize_t *pos, PyObject **key, Py_hash_t *hash) { diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index f0770727f4de7..09945a8f7a6a0 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -1253,7 +1253,6 @@ finalize_interp_types(PyThreadState *tstate, int is_main_interp) _PyAsyncGen_Fini(tstate); _PyContext_Fini(tstate); - _PySet_Fini(tstate); _PyDict_Fini(tstate); _PyList_Fini(tstate); _PyTuple_Fini(tstate); From webhook-mailer at python.org Tue Jun 23 14:28:33 2020 From: webhook-mailer at python.org (Ammar Askar) Date: Tue, 23 Jun 2020 18:28:33 -0000 Subject: [Python-checkins] bpo-39699: Remove accidentally committed test change (GH-21089) Message-ID: https://github.com/python/cpython/commit/6eab52ffadb2836adb59d0578c84d247f05e19b1 commit: 6eab52ffadb2836adb59d0578c84d247f05e19b1 branch: 3.8 author: Ammar Askar committer: GitHub date: 2020-06-23T19:28:25+01:00 summary: bpo-39699: Remove accidentally committed test change (GH-21089) files: M Doc/whatsnew/3.0.rst diff --git a/Doc/whatsnew/3.0.rst b/Doc/whatsnew/3.0.rst index 6b8bd8861fe9b..880958d3edb90 100644 --- a/Doc/whatsnew/3.0.rst +++ b/Doc/whatsnew/3.0.rst @@ -2,8 +2,6 @@ What's New In Python 3.0 **************************** -TEST CHANGE TO BE UNDONE - .. 
XXX Add trademark info for Apple, Microsoft. :Author: Guido van Rossum From webhook-mailer at python.org Tue Jun 23 14:41:58 2020 From: webhook-mailer at python.org (Nikita Nemkin) Date: Tue, 23 Jun 2020 18:41:58 -0000 Subject: [Python-checkins] bpo-41089: Filters and other issues in Visual Studio projects (GH-21070) Message-ID: https://github.com/python/cpython/commit/fe2a48c605d98ac02ab2b9593cb87ce364aeae2d commit: fe2a48c605d98ac02ab2b9593cb87ce364aeae2d branch: master author: Nikita Nemkin committer: GitHub date: 2020-06-23T19:41:49+01:00 summary: bpo-41089: Filters and other issues in Visual Studio projects (GH-21070) * Add missing header files to pythoncore. * Add missing file filters ("Resource Files" in particular) to all projects. * Add new sub-filters for private headers in pythoncore and for 3rd party source files. * Add missing _zoneinfo configurations in pcbuild.sln. * Update bdist_wininst with the new zlib location. files: A PCbuild/liblzma.vcxproj.filters A PCbuild/python_uwp.vcxproj.filters A PCbuild/pythonw_uwp.vcxproj.filters A PCbuild/venvlauncher.vcxproj.filters A PCbuild/venvwlauncher.vcxproj.filters M PC/bdist_wininst/bdist_wininst.vcxproj M PC/bdist_wininst/bdist_wininst.vcxproj.filters M PCbuild/_asyncio.vcxproj.filters M PCbuild/_bz2.vcxproj.filters M PCbuild/_ctypes.vcxproj.filters M PCbuild/_ctypes_test.vcxproj.filters M PCbuild/_decimal.vcxproj.filters M PCbuild/_elementtree.vcxproj.filters M PCbuild/_freeze_importlib.vcxproj.filters M PCbuild/_hashlib.vcxproj.filters M PCbuild/_lzma.vcxproj.filters M PCbuild/_msi.vcxproj.filters M PCbuild/_multiprocessing.vcxproj.filters M PCbuild/_overlapped.vcxproj.filters M PCbuild/_queue.vcxproj.filters M PCbuild/_socket.vcxproj.filters M PCbuild/_sqlite3.vcxproj.filters M PCbuild/_ssl.vcxproj.filters M PCbuild/_testbuffer.vcxproj.filters M PCbuild/_testcapi.vcxproj.filters M PCbuild/_testconsole.vcxproj.filters M PCbuild/_testembed.vcxproj.filters M PCbuild/_testimportmultiple.vcxproj.filters M PCbuild/_testinternalcapi.vcxproj.filters M PCbuild/_testmultiphase.vcxproj.filters M PCbuild/_tkinter.vcxproj.filters M PCbuild/_uuid.vcxproj.filters M PCbuild/_zoneinfo.vcxproj.filters M PCbuild/pcbuild.sln M PCbuild/pyexpat.vcxproj.filters M PCbuild/pylauncher.vcxproj.filters M PCbuild/pyshellext.vcxproj.filters M PCbuild/python3dll.vcxproj.filters M PCbuild/pythoncore.vcxproj M PCbuild/pythoncore.vcxproj.filters M PCbuild/pythonw.vcxproj.filters M PCbuild/pywlauncher.vcxproj.filters M PCbuild/select.vcxproj.filters M PCbuild/sqlite3.vcxproj.filters M PCbuild/unicodedata.vcxproj.filters M PCbuild/winsound.vcxproj.filters diff --git a/PC/bdist_wininst/bdist_wininst.vcxproj b/PC/bdist_wininst/bdist_wininst.vcxproj index d2f1bb75e30d8..390bf4e62728c 100644 --- a/PC/bdist_wininst/bdist_wininst.vcxproj +++ b/PC/bdist_wininst/bdist_wininst.vcxproj @@ -87,7 +87,7 @@ MinSpace - $(PySourcePath)Modules\zlib;%(AdditionalIncludeDirectories) + $(zlibDir);%(AdditionalIncludeDirectories) _CRT_SECURE_NO_DEPRECATE;_CRT_NONSTDC_NO_DEPRECATE;%(PreprocessorDefinitions) MultiThreadedDebug MultiThreaded @@ -102,15 +102,16 @@ - - - - - - + + + + + + + diff --git a/PC/bdist_wininst/bdist_wininst.vcxproj.filters b/PC/bdist_wininst/bdist_wininst.vcxproj.filters index b4b6a4d514db6..c97742e723970 100644 --- a/PC/bdist_wininst/bdist_wininst.vcxproj.filters +++ b/PC/bdist_wininst/bdist_wininst.vcxproj.filters @@ -16,45 +16,51 @@ {0c77c1cf-3f87-4f87-bd86-b425211c2181} ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe + + {d10220c7-69e3-47c5-8d82-c8e0d4d2ac88} 
+ - + Source Files - + Source Files - + Source Files\zlib - + Source Files\zlib - + Source Files\zlib - + Source Files\zlib - + Source Files\zlib - + Source Files\zlib - - Header Files - - - - + Resource Files - + + Header Files + + + Header Files\zlib + + + + Resource Files diff --git a/PCbuild/_asyncio.vcxproj.filters b/PCbuild/_asyncio.vcxproj.filters index 10a186cdad1cd..a09ae928a9b5e 100644 --- a/PCbuild/_asyncio.vcxproj.filters +++ b/PCbuild/_asyncio.vcxproj.filters @@ -1,16 +1,21 @@ ? - - - {2422278e-eeeb-4241-8182-433e2bc5a7fc} + + {41f1cd52-b682-46aa-a7fd-7bdf81a18010} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_bz2.vcxproj.filters b/PCbuild/_bz2.vcxproj.filters index eac64dd29d6c0..7c0b5162537e0 100644 --- a/PCbuild/_bz2.vcxproj.filters +++ b/PCbuild/_bz2.vcxproj.filters @@ -4,10 +4,16 @@ {f53a859d-dad2-4d5b-ae41-f28d8b571f5a} - + + {b90c3cee-7700-4e87-bf85-0801866e8d0d} + + {7e0bed05-ae33-43b7-8797-656455bbb7f3} - + + {b53f67d8-fdf0-4e10-a987-e44475ff434a} + + {ed574b89-6983-4cdf-9f98-fe7048d9e89c} @@ -16,33 +22,38 @@ Source Files - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - bzip2 1.0.6 Source Files + Source Files\bzip2 - - bzip2 1.0.6 Header Files - - bzip2 1.0.6 Header Files + Header Files\bzip2 + + Header Files\bzip2 + + + + + Resource Files + - + \ No newline at end of file diff --git a/PCbuild/_ctypes.vcxproj.filters b/PCbuild/_ctypes.vcxproj.filters index 3123286347aee..118c4f0698ccb 100644 --- a/PCbuild/_ctypes.vcxproj.filters +++ b/PCbuild/_ctypes.vcxproj.filters @@ -7,6 +7,9 @@ {dbdea1f2-ad8b-44ca-b782-fcf65d91559b} + + {31a37bb4-c384-41ff-9ec1-8ad98d482e22} + @@ -37,6 +40,8 @@ - + + Resource Files + \ No newline at end of file diff --git a/PCbuild/_ctypes_test.vcxproj.filters b/PCbuild/_ctypes_test.vcxproj.filters index c1e94ce1faa0a..5174196c52e4d 100644 --- a/PCbuild/_ctypes_test.vcxproj.filters +++ b/PCbuild/_ctypes_test.vcxproj.filters @@ -7,6 +7,9 @@ {38abc486-e143-49dc-8cf0-8aefab0e0d3d} + + {5030ff8f-daf5-4bc8-b1dd-e8b59d34c511} + @@ -18,4 +21,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_decimal.vcxproj.filters b/PCbuild/_decimal.vcxproj.filters index 5f7de3d85381e..0cbd3d0736c24 100644 --- a/PCbuild/_decimal.vcxproj.filters +++ b/PCbuild/_decimal.vcxproj.filters @@ -7,55 +7,64 @@ {138089f8-faba-494f-b6ed-051f31fbaf2d} + + {632b24a3-0844-4e57-ad34-b0e4cef886dd} + + + {322d127c-1105-4a31-aed2-e29cdececc77} + + + {780c3b7a-7817-4e89-a2f2-fc522f2c5966} + - + Header Files + + Header Files\libmpdec + - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files - - - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec - Header Files + Header Files\libmpdec @@ -63,51 +72,56 @@ Source Files - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source 
Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec - Source Files + Source Files\libmpdec + + + Resource Files + + - Source Files + Source Files\libmpdec - + \ No newline at end of file diff --git a/PCbuild/_elementtree.vcxproj.filters b/PCbuild/_elementtree.vcxproj.filters index 6acdf35846ab1..bc14e31f32b95 100644 --- a/PCbuild/_elementtree.vcxproj.filters +++ b/PCbuild/_elementtree.vcxproj.filters @@ -7,52 +7,61 @@ {7b5335ad-059f-486f-85e4-f4757e26a9bf} + + {37d3ef0a-1ea6-492d-bba7-b83865198caa} + + + {6099ed72-6668-4779-adb2-a2362e5da3b9} + + + {f99990ba-cd06-40cc-8f28-d2d424ec13be} + - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat - Header Files + Header Files\expat @@ -60,13 +69,18 @@ Source Files - Source Files + Source Files\expat - Source Files + Source Files\expat - Source Files + Source Files\expat - + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/_freeze_importlib.vcxproj.filters b/PCbuild/_freeze_importlib.vcxproj.filters index ccad053a9f536..3ee9eb750d67e 100644 --- a/PCbuild/_freeze_importlib.vcxproj.filters +++ b/PCbuild/_freeze_importlib.vcxproj.filters @@ -5,13 +5,8 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - - - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} - rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms + + {eb238244-ace1-48fc-97a4-16ff886f8642} @@ -23,5 +18,11 @@ Source Files + + Python Files + + + Python Files + - + \ No newline at end of file diff --git a/PCbuild/_hashlib.vcxproj.filters b/PCbuild/_hashlib.vcxproj.filters index a9d993eb3ed4a..7a0700c007f64 100644 --- a/PCbuild/_hashlib.vcxproj.filters +++ b/PCbuild/_hashlib.vcxproj.filters @@ -4,10 +4,18 @@ {cc45963d-bd25-4eb8-bdba-a5507090bca4} + + {67630fa4-76e4-4035-bced-043a6df1e2e0} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_lzma.vcxproj.filters b/PCbuild/_lzma.vcxproj.filters index 0c5ed74608e6e..e23e5399df449 100644 --- a/PCbuild/_lzma.vcxproj.filters +++ b/PCbuild/_lzma.vcxproj.filters @@ -4,10 +4,18 @@ {53e68eda-39fc-4336-a658-dc5f5d598760} + + {9e5ecf81-2940-4dd5-af98-58e98810d030} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_msi.vcxproj.filters b/PCbuild/_msi.vcxproj.filters index 7dec52de0b3e5..a94fb18e61e48 100644 --- a/PCbuild/_msi.vcxproj.filters +++ b/PCbuild/_msi.vcxproj.filters @@ -4,10 +4,18 @@ {bdef7710-e433-4ac0-84e0-14f34454bd3e} + + {8513f324-7c13-4657-b463-5d686a8a5371} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_multiprocessing.vcxproj.filters b/PCbuild/_multiprocessing.vcxproj.filters index 78133ec0ec845..85caddb10db28 100644 --- 
a/PCbuild/_multiprocessing.vcxproj.filters +++ b/PCbuild/_multiprocessing.vcxproj.filters @@ -7,6 +7,9 @@ {34615a62-f999-4659-83f5-19d17a644530} + + {1dcf6347-2248-42e1-ab3c-1b19f4f6f647} + @@ -21,4 +24,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_overlapped.vcxproj.filters b/PCbuild/_overlapped.vcxproj.filters index 78de895055905..29a9b77740b15 100644 --- a/PCbuild/_overlapped.vcxproj.filters +++ b/PCbuild/_overlapped.vcxproj.filters @@ -4,10 +4,18 @@ {6f67c8db-7de7-4714-a967-2b0d4bc71f2e} + + {83fe502d-eca2-4505-b626-eddec9b6ea9f} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_queue.vcxproj.filters b/PCbuild/_queue.vcxproj.filters index 88b80826adfee..ec48e9d3aeddf 100644 --- a/PCbuild/_queue.vcxproj.filters +++ b/PCbuild/_queue.vcxproj.filters @@ -1,16 +1,21 @@ ? - - - {c56a5dd3-7838-48e9-a781-855d8be7370f} + + {bc5dc97e-11b8-435a-82e7-2ef3c9b44f5e} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_socket.vcxproj.filters b/PCbuild/_socket.vcxproj.filters index 790ffa80dd4c8..453175c6a43a6 100644 --- a/PCbuild/_socket.vcxproj.filters +++ b/PCbuild/_socket.vcxproj.filters @@ -7,6 +7,9 @@ {1edfe0d0-7b9d-4dc8-a335-b21fef7cc77a} + + {f8efff18-28ed-4c6b-8e8d-fa816d9a81a8} + @@ -18,4 +21,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_sqlite3.vcxproj.filters b/PCbuild/_sqlite3.vcxproj.filters index dce77c96a8059..51830f6a4451a 100644 --- a/PCbuild/_sqlite3.vcxproj.filters +++ b/PCbuild/_sqlite3.vcxproj.filters @@ -7,6 +7,9 @@ {814b187d-44ad-4f2b-baa7-18ca8a8a6a77} + + {225f58de-2bad-4e4d-bc0b-fe74ed6bf5f1} + @@ -66,4 +69,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_ssl.vcxproj.filters b/PCbuild/_ssl.vcxproj.filters index bd46b609840b2..716a69a41af35 100644 --- a/PCbuild/_ssl.vcxproj.filters +++ b/PCbuild/_ssl.vcxproj.filters @@ -4,6 +4,9 @@ {695348f7-e9f6-4fe1-bc03-5f08ffc8095b} + + {1b18a2e6-040d-46c7-a9ac-ac2ec64fb5d6} + @@ -14,6 +17,8 @@ - + + Resource Files + \ No newline at end of file diff --git a/PCbuild/_testbuffer.vcxproj.filters b/PCbuild/_testbuffer.vcxproj.filters index eadf282750baa..bea4260b9f743 100644 --- a/PCbuild/_testbuffer.vcxproj.filters +++ b/PCbuild/_testbuffer.vcxproj.filters @@ -4,10 +4,18 @@ {8d232240-921a-4bc2-87c3-93ffd3462f0a} + + {1e73201a-cca4-4b45-9484-262709cafee7} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_testcapi.vcxproj.filters b/PCbuild/_testcapi.vcxproj.filters index 7a6ad1a864eca..53f64b7aa1e1a 100644 --- a/PCbuild/_testcapi.vcxproj.filters +++ b/PCbuild/_testcapi.vcxproj.filters @@ -4,10 +4,18 @@ {a76a90d8-8e8b-4c36-8f58-8bd46abe9f5e} + + {071b2ff4-e5a1-4e79-b0c5-cf46b0094a80} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_testconsole.vcxproj.filters b/PCbuild/_testconsole.vcxproj.filters index 0c25101e1b4fb..321e17785fddb 100644 --- a/PCbuild/_testconsole.vcxproj.filters +++ b/PCbuild/_testconsole.vcxproj.filters @@ -5,18 +5,19 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms - + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_testembed.vcxproj.filters 
b/PCbuild/_testembed.vcxproj.filters index f7f9abeb1dfaa..b90fd85f79425 100644 --- a/PCbuild/_testembed.vcxproj.filters +++ b/PCbuild/_testembed.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms @@ -19,4 +15,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_testimportmultiple.vcxproj.filters b/PCbuild/_testimportmultiple.vcxproj.filters index db0d0a65e1290..8f63d13482513 100644 --- a/PCbuild/_testimportmultiple.vcxproj.filters +++ b/PCbuild/_testimportmultiple.vcxproj.filters @@ -4,10 +4,18 @@ {1ec38ad9-1abf-4b80-8628-ac43ccba324b} + + {0ff128a6-7814-4f8e-826e-860a858104ee} + Source Files - + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/_testinternalcapi.vcxproj.filters b/PCbuild/_testinternalcapi.vcxproj.filters index 4644f230be1ce..7734da0b7b426 100644 --- a/PCbuild/_testinternalcapi.vcxproj.filters +++ b/PCbuild/_testinternalcapi.vcxproj.filters @@ -4,10 +4,18 @@ {136fc5eb-7fe4-4486-8c6d-b49f37a00199} + + {acecc890-f8dd-4942-b6d2-1fd8f73a5d6c} + Source Files - + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/_testmultiphase.vcxproj.filters b/PCbuild/_testmultiphase.vcxproj.filters index 0c25101e1b4fb..8df0a42e9f3c7 100644 --- a/PCbuild/_testmultiphase.vcxproj.filters +++ b/PCbuild/_testmultiphase.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms @@ -19,4 +15,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_tkinter.vcxproj.filters b/PCbuild/_tkinter.vcxproj.filters index 4e5cd479e30b5..0d919439a76af 100644 --- a/PCbuild/_tkinter.vcxproj.filters +++ b/PCbuild/_tkinter.vcxproj.filters @@ -4,6 +4,9 @@ {b9ce64dd-cb95-472d-bbe8-5583b2cd375b} + + {2bd3a90c-5b2e-45fb-9b2a-fbf1a4faf5f9} + @@ -13,4 +16,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_uuid.vcxproj.filters b/PCbuild/_uuid.vcxproj.filters index 1794929231434..705902ff50c3a 100644 --- a/PCbuild/_uuid.vcxproj.filters +++ b/PCbuild/_uuid.vcxproj.filters @@ -5,10 +5,18 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + {4fa4dbfa-e069-4ab4-86a6-ad389b2ec407} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/_zoneinfo.vcxproj.filters b/PCbuild/_zoneinfo.vcxproj.filters index e3c3ef4c72a7f..57f31e05d561a 100644 --- a/PCbuild/_zoneinfo.vcxproj.filters +++ b/PCbuild/_zoneinfo.vcxproj.filters @@ -1,16 +1,21 @@ ? - - - {2422278e-eeeb-4241-8182-433e2bc5a7fc} + + {0616fb85-7891-4790-83c2-005f906cf555} + Source Files - + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/liblzma.vcxproj.filters b/PCbuild/liblzma.vcxproj.filters new file mode 100644 index 0000000000000..3f58351fa9edb --- /dev/null +++ b/PCbuild/liblzma.vcxproj.filters @@ -0,0 +1,435 @@ +? 
+ + + + {cb1870af-3c7e-48ba-bd7f-3e87468f8ed7} + + + {58761ffe-2af0-42a8-9f93-4e57e1954c36} + + + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + Source Files + + + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + Header Files + + + \ No newline at end of file diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln index 61db4e02ad383..4b6dc1e6771dc 100644 --- a/PCbuild/pcbuild.sln +++ b/PCbuild/pcbuild.sln @@ -1267,6 +1267,38 @@ Global {384C224A-7474-476E-A01B-750EA7DE918C}.Release|Win32.Build.0 = Release|Win32 {384C224A-7474-476E-A01B-750EA7DE918C}.Release|x64.ActiveCfg = Release|x64 {384C224A-7474-476E-A01B-750EA7DE918C}.Release|x64.Build.0 = Release|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|ARM.ActiveCfg = Debug|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|ARM.Build.0 = Debug|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|ARM64.ActiveCfg = Debug|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|ARM64.Build.0 = Debug|ARM64 + 
{FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|Win32.ActiveCfg = Debug|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|Win32.Build.0 = Debug|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|x64.ActiveCfg = Debug|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Debug|x64.Build.0 = Debug|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|ARM.ActiveCfg = PGInstrument|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|ARM.Build.0 = PGInstrument|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|ARM64.ActiveCfg = PGInstrument|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|ARM64.Build.0 = PGInstrument|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|Win32.ActiveCfg = PGInstrument|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|Win32.Build.0 = PGInstrument|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|x64.ActiveCfg = PGInstrument|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGInstrument|x64.Build.0 = PGInstrument|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|ARM.ActiveCfg = PGUpdate|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|ARM.Build.0 = PGUpdate|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|ARM64.ActiveCfg = PGUpdate|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|ARM64.Build.0 = PGUpdate|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|Win32.ActiveCfg = PGUpdate|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|Win32.Build.0 = PGUpdate|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|x64.ActiveCfg = PGUpdate|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.PGUpdate|x64.Build.0 = PGUpdate|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|ARM.ActiveCfg = Release|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|ARM.Build.0 = Release|ARM + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|ARM64.ActiveCfg = Release|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|ARM64.Build.0 = Release|ARM64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|Win32.ActiveCfg = Release|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|Win32.Build.0 = Release|Win32 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|x64.ActiveCfg = Release|x64 + {FCBE1EF2-E0F0-40B1-88B5-00A35D378742}.Release|x64.Build.0 = Release|x64 {78D80A15-BD8C-44E2-B49E-1F05B0A0A687}.Debug|ARM.ActiveCfg = Debug|ARM {78D80A15-BD8C-44E2-B49E-1F05B0A0A687}.Debug|ARM.Build.0 = Debug|ARM {78D80A15-BD8C-44E2-B49E-1F05B0A0A687}.Debug|ARM64.ActiveCfg = Debug|ARM64 diff --git a/PCbuild/pyexpat.vcxproj.filters b/PCbuild/pyexpat.vcxproj.filters index f8d46026c9c28..fd22fc8c477df 100644 --- a/PCbuild/pyexpat.vcxproj.filters +++ b/PCbuild/pyexpat.vcxproj.filters @@ -7,6 +7,9 @@ {5af9d40c-fc46-4640-ad84-3d1dd34a71d7} + + {f1dbbdb5-41e5-4a88-bf8e-13da010c0ce4} + @@ -30,4 +33,9 @@ Source Files - + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/pylauncher.vcxproj.filters b/PCbuild/pylauncher.vcxproj.filters index e4b23d2af8366..17d0389ca50f1 100644 --- a/PCbuild/pylauncher.vcxproj.filters +++ b/PCbuild/pylauncher.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms diff --git a/PCbuild/pyshellext.vcxproj.filters b/PCbuild/pyshellext.vcxproj.filters index 648e499f6f402..36d1d1655f5df 100644 --- a/PCbuild/pyshellext.vcxproj.filters +++ 
b/PCbuild/pyshellext.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms diff --git a/PCbuild/python3dll.vcxproj.filters b/PCbuild/python3dll.vcxproj.filters index d250c45254e80..a7566052e1204 100644 --- a/PCbuild/python3dll.vcxproj.filters +++ b/PCbuild/python3dll.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav diff --git a/PCbuild/python_uwp.vcxproj.filters b/PCbuild/python_uwp.vcxproj.filters new file mode 100644 index 0000000000000..79e87461eb1b7 --- /dev/null +++ b/PCbuild/python_uwp.vcxproj.filters @@ -0,0 +1,26 @@ +? + + + + {fd8bf000-0bbe-4fd4-ac49-29036e5a5c5a} + + + {a0d4ce0b-a7b5-4a77-b6c2-d2ddb9bd49b8} + + + + + Resource Files + + + + + Resource Files + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/pythoncore.vcxproj b/PCbuild/pythoncore.vcxproj index 54e23c2c2b505..db26e38911bc0 100644 --- a/PCbuild/pythoncore.vcxproj +++ b/PCbuild/pythoncore.vcxproj @@ -134,8 +134,10 @@ + + @@ -174,6 +176,7 @@ + diff --git a/PCbuild/pythoncore.vcxproj.filters b/PCbuild/pythoncore.vcxproj.filters index 4ba0bc2ac3bfa..1f3883768c9a2 100644 --- a/PCbuild/pythoncore.vcxproj.filters +++ b/PCbuild/pythoncore.vcxproj.filters @@ -31,6 +31,12 @@ {c3e03a5c-56c7-45fd-8543-e5d2326b907d} + + {86ffb5eb-c423-43aa-b736-a8850d3277df} + + + {875bf4f2-ac42-46bd-b703-8371a824ec32} + @@ -75,72 +81,6 @@ Include - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - Include @@ -183,105 +123,6 @@ Include - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - - - Include - Include @@ -549,6 +390,189 @@ Include + + Parser + + + Parser + + + Include + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include\cpython + + + Include + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + 
+ Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + + + Include\internal + Modules\zlib @@ -629,9 +653,6 @@ Modules - - Modules - Modules @@ -695,9 +716,6 @@ Modules - - Modules - Modules @@ -893,18 +911,6 @@ Objects - - Parser - - - Parser - - - Parser - - - Parser - Parser @@ -956,9 +962,6 @@ Python - - Python - Python @@ -998,9 +1001,6 @@ Python - - Python - Modules @@ -1118,9 +1118,6 @@ Objects - - PC - Modules @@ -1157,10 +1154,34 @@ Modules\zlib + + Python + + + Parser + + + Parser + + + Parser + + + Modules + + + Parser + + + Python + + + Objects + Resource Files - + \ No newline at end of file diff --git a/PCbuild/pythonw.vcxproj.filters b/PCbuild/pythonw.vcxproj.filters index 259866fbcc2f8..20d87051e2511 100644 --- a/PCbuild/pythonw.vcxproj.filters +++ b/PCbuild/pythonw.vcxproj.filters @@ -8,14 +8,14 @@ {e1d8ea6b-c65d-42f4-9eed-6010846ed378} - - - Resource Files - - Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/pythonw_uwp.vcxproj.filters b/PCbuild/pythonw_uwp.vcxproj.filters new file mode 100644 index 0000000000000..2f39bdea9e910 --- /dev/null +++ b/PCbuild/pythonw_uwp.vcxproj.filters @@ -0,0 +1,26 @@ +? + + + + {de05f656-4dcb-4fe7-9946-5c325ea2d842} + + + {4102e199-3e5c-42d0-b37b-d42394b20d9e} + + + + + Resource Files + + + + + Resource Files + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/pywlauncher.vcxproj.filters b/PCbuild/pywlauncher.vcxproj.filters index e4b23d2af8366..17d0389ca50f1 100644 --- a/PCbuild/pywlauncher.vcxproj.filters +++ b/PCbuild/pywlauncher.vcxproj.filters @@ -5,10 +5,6 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx - - {93995380-89BD-4b04-88EB-625FBE52EBFB} - h;hpp;hxx;hm;inl;inc;xsd - {67DA6AB6-F800-4c08-8B7A-83BB121AAD01} rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms diff --git a/PCbuild/select.vcxproj.filters b/PCbuild/select.vcxproj.filters index 55a2fc834a357..5354820dd49cc 100644 --- a/PCbuild/select.vcxproj.filters +++ b/PCbuild/select.vcxproj.filters @@ -4,10 +4,18 @@ {98346077-900c-4c7a-852f-a23470e37b40} + + {b47a8e6c-47c0-4490-aa91-1a3624a0905c} + Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/sqlite3.vcxproj.filters b/PCbuild/sqlite3.vcxproj.filters index 86680c1c38303..76662ed68d024 100644 --- a/PCbuild/sqlite3.vcxproj.filters +++ b/PCbuild/sqlite3.vcxproj.filters @@ -7,6 +7,9 @@ {0e842fe2-176b-4e83-9d1f-0ad13a859efd} + + {0248795a-00c9-4090-ad61-55ae23438598} + @@ -21,4 +24,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/unicodedata.vcxproj.filters b/PCbuild/unicodedata.vcxproj.filters index 20e640ee11ede..7ff97835c9623 100644 --- a/PCbuild/unicodedata.vcxproj.filters +++ b/PCbuild/unicodedata.vcxproj.filters @@ -7,6 +7,9 @@ {e2c055bb-ec62-4bbc-aa1c-d88da4d4ad1c} + + {d04f3447-67b0-42aa-b84f-9fc0029d5af7} + @@ -21,4 +24,9 @@ Source Files + + + Resource Files + + \ No newline at end of file diff --git a/PCbuild/venvlauncher.vcxproj.filters b/PCbuild/venvlauncher.vcxproj.filters new file mode 100644 index 
0000000000000..ec13936bf6cb7 --- /dev/null +++ b/PCbuild/venvlauncher.vcxproj.filters @@ -0,0 +1,26 @@ +? + + + + {8f3ab79e-3cba-4e6d-82b2-559ce946de58} + + + {4a2423af-e5d1-4c88-b308-d71b768977df} + + + + + Resource Files + + + + + Resource Files + + + + + Source Files + + + \ No newline at end of file diff --git a/PCbuild/venvwlauncher.vcxproj.filters b/PCbuild/venvwlauncher.vcxproj.filters new file mode 100644 index 0000000000000..8addc13e977e7 --- /dev/null +++ b/PCbuild/venvwlauncher.vcxproj.filters @@ -0,0 +1,26 @@ +? + + + + {7683f248-9c32-4e72-a329-5bd84993f63a} + + + {61b34b26-ce53-405d-a743-b370ff505887} + + + + + Source Files + + + + + Resource Files + + + + + Resource Files + + + \ No newline at end of file diff --git a/PCbuild/winsound.vcxproj.filters b/PCbuild/winsound.vcxproj.filters index 7d08931b186d9..6775802638205 100644 --- a/PCbuild/winsound.vcxproj.filters +++ b/PCbuild/winsound.vcxproj.filters @@ -5,10 +5,18 @@ {4FC737F1-C7A5-4376-A066-2A32D752A2FF} cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx + + {6be42502-398f-4bec-8677-8809a2da0eef} + Source Files + + + Resource Files + + \ No newline at end of file From webhook-mailer at python.org Tue Jun 23 14:45:34 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Tue, 23 Jun 2020 18:45:34 -0000 Subject: [Python-checkins] On path with known exact float, extract the double with the fast macro. (GH-21072) Message-ID: https://github.com/python/cpython/commit/930f4518aea7f3f0f914ce93c3fb92831a7e1d2a commit: 930f4518aea7f3f0f914ce93c3fb92831a7e1d2a branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-23T11:45:25-07:00 summary: On path with known exact float, extract the double with the fast macro. (GH-21072) files: M Modules/mathmodule.c diff --git a/Modules/mathmodule.c b/Modules/mathmodule.c index 4450ce1894102..411c6eb1935fa 100644 --- a/Modules/mathmodule.c +++ b/Modules/mathmodule.c @@ -1256,9 +1256,15 @@ static PyObject * math_floor(PyObject *module, PyObject *number) /*[clinic end generated code: output=c6a65c4884884b8a input=63af6b5d7ebcc3d6]*/ { + double x; + _Py_IDENTIFIER(__floor__); - if (!PyFloat_CheckExact(number)) { + if (PyFloat_CheckExact(number)) { + x = PyFloat_AS_DOUBLE(number); + } + else + { PyObject *method = _PyObject_LookupSpecial(number, &PyId___floor__); if (method != NULL) { PyObject *result = _PyObject_CallNoArg(method); @@ -1267,11 +1273,10 @@ math_floor(PyObject *module, PyObject *number) } if (PyErr_Occurred()) return NULL; + x = PyFloat_AsDouble(number); + if (x == -1.0 && PyErr_Occurred()) + return NULL; } - double x = PyFloat_AsDouble(number); - if (x == -1.0 && PyErr_Occurred()) - return NULL; - return PyLong_FromDouble(floor(x)); } From webhook-mailer at python.org Tue Jun 23 15:06:06 2020 From: webhook-mailer at python.org (Nikita Nemkin) Date: Tue, 23 Jun 2020 19:06:06 -0000 Subject: [Python-checkins] bpo-41054: Simplify resource compilation on Windows (GH-21004) Message-ID: https://github.com/python/cpython/commit/4efc3360c9a83d5891f27ed67b4f0ab7275d2ab4 commit: 4efc3360c9a83d5891f27ed67b4f0ab7275d2ab4 branch: master author: Nikita Nemkin committer: GitHub date: 2020-06-23T20:05:57+01:00 summary: bpo-41054: Simplify resource compilation on Windows (GH-21004) Remove auto-generated resource header. Pass definitions required by resource files (ORIGINAL_FILENAME and FIELD3) directly to resource compiler. Remove unused MS_DLL_ID resource string and related dead code. 
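To make the mechanism described in the summary concrete: instead of an auto-generated header, the resource compiler now receives the values as ordinary preprocessor definitions (the pyproject.props hunk below adds ORIGINAL_FILENAME and FIELD3 to the ResourceCompile PreprocessorDefinitions, which MSBuild passes to rc.exe as /d switches), and the .rc files consume them through normal C-preprocessor string-literal concatenation. The fragment below is only a hedged sketch of that pattern, not the exact CPython resource script; the file name, the version numbers and the fallback value are illustrative.

    /* sketch.rc: minimal version resource consuming a command-line macro.
       ORIGINAL_FILENAME is expected from the resource compiler command line,
       e.g. rc.exe /d ORIGINAL_FILENAME=\"python310.dll\" sketch.rc;
       the fallback below exists only so the sketch builds stand-alone. */
    #include <winver.h>
    #ifndef ORIGINAL_FILENAME
    #define ORIGINAL_FILENAME "example.dll"
    #endif

    VS_VERSION_INFO VERSIONINFO
     FILEVERSION 3,10,0,0
    BEGIN
        BLOCK "StringFileInfo"
        BEGIN
            BLOCK "000004b0"
            BEGIN
                /* adjacent string literals concatenate, appending the NUL */
                VALUE "OriginalFilename", ORIGINAL_FILENAME "\0"
            END
        END
    END
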
files: M PC/dl_nt.c M PC/python_nt.rc M PC/python_ver_rc.h M PCbuild/pyproject.props diff --git a/PC/dl_nt.c b/PC/dl_nt.c index 0bf04f1bf3d79..7f17ee168727f 100644 --- a/PC/dl_nt.c +++ b/PC/dl_nt.c @@ -12,16 +12,10 @@ forgotten) from the programmer. #include "windows.h" #ifdef Py_ENABLE_SHARED -#ifdef MS_DLL_ID -// The string is available at build, so fill the buffer immediately -char dllVersionBuffer[16] = MS_DLL_ID; -#else -char dllVersionBuffer[16] = ""; // a private buffer -#endif // Python Globals HMODULE PyWin_DLLhModule = NULL; -const char *PyWin_DLLVersionString = dllVersionBuffer; +const char *PyWin_DLLVersionString = MS_DLL_ID; BOOL WINAPI DllMain (HANDLE hInst, ULONG ul_reason_for_call, @@ -31,11 +25,6 @@ BOOL WINAPI DllMain (HANDLE hInst, { case DLL_PROCESS_ATTACH: PyWin_DLLhModule = hInst; -#ifndef MS_DLL_ID - // If we have MS_DLL_ID, we don't need to load the string. - // 1000 is a magic number I picked out of the air. Could do with a #define, I spose... - LoadString(hInst, 1000, dllVersionBuffer, sizeof(dllVersionBuffer)); -#endif break; case DLL_PROCESS_DETACH: diff --git a/PC/python_nt.rc b/PC/python_nt.rc index fac6105d8a772..b5fb58f26b202 100644 --- a/PC/python_nt.rc +++ b/PC/python_nt.rc @@ -7,12 +7,6 @@ #include 2 RT_MANIFEST "python.manifest" -// String Tables -STRINGTABLE DISCARDABLE -BEGIN - 1000, MS_DLL_ID -END - ///////////////////////////////////////////////////////////////////////////// // // Version @@ -40,7 +34,7 @@ BEGIN VALUE "FileVersion", PYTHON_VERSION VALUE "InternalName", "Python DLL\0" VALUE "LegalCopyright", PYTHON_COPYRIGHT "\0" - VALUE "OriginalFilename", PYTHON_DLL_NAME "\0" + VALUE "OriginalFilename", ORIGINAL_FILENAME "\0" VALUE "ProductName", "Python\0" VALUE "ProductVersion", PYTHON_VERSION END diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index f95e755bb8bd7..c318d4487ea1b 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -9,7 +9,6 @@ #define MS_WINDOWS #include "modsupport.h" #include "patchlevel.h" -#include #ifdef _DEBUG # define PYTHON_DEBUG_EXT "_d" #else diff --git a/PCbuild/pyproject.props b/PCbuild/pyproject.props index bb918b28e9018..94a01ff5ca8a0 100644 --- a/PCbuild/pyproject.props +++ b/PCbuild/pyproject.props @@ -80,7 +80,7 @@ $(PySourcePath)PC;$(PySourcePath)Include;$(IntDir);%(AdditionalIncludeDirectories) - $(_DebugPreprocessorDefinition)%(PreprocessorDefinitions) + ORIGINAL_FILENAME=\"$(TargetName)$(TargetExt)\";FIELD3=$(Field3Value);$(_DebugPreprocessorDefinition)%(PreprocessorDefinitions) 0x0409 @@ -95,21 +95,6 @@ - - - - - - - From webhook-mailer at python.org Tue Jun 23 15:33:31 2020 From: webhook-mailer at python.org (Nikita Nemkin) Date: Tue, 23 Jun 2020 19:33:31 -0000 Subject: [Python-checkins] bpo-41039: Simplify python3.dll build (GH-20989) Message-ID: https://github.com/python/cpython/commit/2c6e4e91c5a4d3f25908108f4ed32aba936df70c commit: 2c6e4e91c5a4d3f25908108f4ed32aba936df70c branch: master author: Nikita Nemkin committer: GitHub date: 2020-06-23T20:33:23+01:00 summary: bpo-41039: Simplify python3.dll build (GH-20989) Use linker comment #pragma and preprocessor for re-exporting stable API functions and variables. Module definition file, custom build targets and entry point code become unnecessary and can be removed. This change also fixes missing _PyErr_BadInternalCall export on x86. 
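The re-export technique named in this summary can be sketched in a few lines of C. MSVC's #pragma comment(linker, "/EXPORT:...") embeds an /EXPORT directive into the object file, the name=module.name form turns that export into a forwarder to the implementation DLL, and a trailing ",DATA" marks variable exports. The snippet below is a minimal, MSVC-specific illustration that assumes the internal DLL is python310.dll and picks two sample stable-ABI symbols; per the summary, the real python3dll.c uses the preprocessor to generate one such line per exported symbol rather than writing them by hand.

    /* forwarding_sketch.c: compiles as an otherwise empty C file under MSVC.
       Each pragma embeds an /EXPORT directive that forwards a symbol from
       this DLL to the implementation DLL (symbol names are illustrative). */

    /* forward a function: python3.dll!Py_Initialize -> python310.Py_Initialize */
    #pragma comment(linker, "/EXPORT:Py_Initialize=python310.Py_Initialize")

    /* forward a data symbol; ",DATA" tells the linker it is a variable,
       matching the DATA annotations in the old python3.def shown below */
    #pragma comment(linker, "/EXPORT:PyBool_Type=python310.PyBool_Type,DATA")
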
files: A Misc/NEWS.d/next/Windows/2020-06-23-03-12-57.bpo-41039.0hgd0s.rst D PC/python3.def M PC/python3dll.c M PCbuild/python3dll.vcxproj M PCbuild/python3dll.vcxproj.filters diff --git a/Misc/NEWS.d/next/Windows/2020-06-23-03-12-57.bpo-41039.0hgd0s.rst b/Misc/NEWS.d/next/Windows/2020-06-23-03-12-57.bpo-41039.0hgd0s.rst new file mode 100644 index 0000000000000..acc3f7441f1b1 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-23-03-12-57.bpo-41039.0hgd0s.rst @@ -0,0 +1,2 @@ +Stable ABI redirection DLL (python3.dll) now uses ``#pragma +comment(linker)`` for re-exporting. diff --git a/PC/python3.def b/PC/python3.def deleted file mode 100644 index 2a6aaf4331ea5..0000000000000 --- a/PC/python3.def +++ /dev/null @@ -1,804 +0,0 @@ -; This file specifies the import forwarding for python3.dll -; It is used when building python3dll.vcxproj -LIBRARY "python3" -EXPORTS - PyArg_Parse=python310.PyArg_Parse - PyArg_ParseTuple=python310.PyArg_ParseTuple - PyArg_ParseTupleAndKeywords=python310.PyArg_ParseTupleAndKeywords - PyArg_UnpackTuple=python310.PyArg_UnpackTuple - PyArg_VaParse=python310.PyArg_VaParse - PyArg_VaParseTupleAndKeywords=python310.PyArg_VaParseTupleAndKeywords - PyArg_ValidateKeywordArguments=python310.PyArg_ValidateKeywordArguments - PyBaseObject_Type=python310.PyBaseObject_Type DATA - PyBool_FromLong=python310.PyBool_FromLong - PyBool_Type=python310.PyBool_Type DATA - PyByteArrayIter_Type=python310.PyByteArrayIter_Type DATA - PyByteArray_AsString=python310.PyByteArray_AsString - PyByteArray_Concat=python310.PyByteArray_Concat - PyByteArray_FromObject=python310.PyByteArray_FromObject - PyByteArray_FromStringAndSize=python310.PyByteArray_FromStringAndSize - PyByteArray_Resize=python310.PyByteArray_Resize - PyByteArray_Size=python310.PyByteArray_Size - PyByteArray_Type=python310.PyByteArray_Type DATA - PyBytesIter_Type=python310.PyBytesIter_Type DATA - PyBytes_AsString=python310.PyBytes_AsString - PyBytes_AsStringAndSize=python310.PyBytes_AsStringAndSize - PyBytes_Concat=python310.PyBytes_Concat - PyBytes_ConcatAndDel=python310.PyBytes_ConcatAndDel - PyBytes_DecodeEscape=python310.PyBytes_DecodeEscape - PyBytes_FromFormat=python310.PyBytes_FromFormat - PyBytes_FromFormatV=python310.PyBytes_FromFormatV - PyBytes_FromObject=python310.PyBytes_FromObject - PyBytes_FromString=python310.PyBytes_FromString - PyBytes_FromStringAndSize=python310.PyBytes_FromStringAndSize - PyBytes_Repr=python310.PyBytes_Repr - PyBytes_Size=python310.PyBytes_Size - PyBytes_Type=python310.PyBytes_Type DATA - PyCFunction_Call=python310.PyCFunction_Call - PyCFunction_GetFlags=python310.PyCFunction_GetFlags - PyCFunction_GetFunction=python310.PyCFunction_GetFunction - PyCFunction_GetSelf=python310.PyCFunction_GetSelf - PyCFunction_New=python310.PyCFunction_New - PyCFunction_NewEx=python310.PyCFunction_NewEx - PyCFunction_Type=python310.PyCFunction_Type DATA - PyCallIter_New=python310.PyCallIter_New - PyCallIter_Type=python310.PyCallIter_Type DATA - PyCallable_Check=python310.PyCallable_Check - PyCapsule_GetContext=python310.PyCapsule_GetContext - PyCapsule_GetDestructor=python310.PyCapsule_GetDestructor - PyCapsule_GetName=python310.PyCapsule_GetName - PyCapsule_GetPointer=python310.PyCapsule_GetPointer - PyCapsule_Import=python310.PyCapsule_Import - PyCapsule_IsValid=python310.PyCapsule_IsValid - PyCapsule_New=python310.PyCapsule_New - PyCapsule_SetContext=python310.PyCapsule_SetContext - PyCapsule_SetDestructor=python310.PyCapsule_SetDestructor - PyCapsule_SetName=python310.PyCapsule_SetName - 
PyCapsule_SetPointer=python310.PyCapsule_SetPointer - PyCapsule_Type=python310.PyCapsule_Type DATA - PyClassMethodDescr_Type=python310.PyClassMethodDescr_Type DATA - PyCodec_BackslashReplaceErrors=python310.PyCodec_BackslashReplaceErrors - PyCodec_Decode=python310.PyCodec_Decode - PyCodec_Decoder=python310.PyCodec_Decoder - PyCodec_Encode=python310.PyCodec_Encode - PyCodec_Encoder=python310.PyCodec_Encoder - PyCodec_IgnoreErrors=python310.PyCodec_IgnoreErrors - PyCodec_IncrementalDecoder=python310.PyCodec_IncrementalDecoder - PyCodec_IncrementalEncoder=python310.PyCodec_IncrementalEncoder - PyCodec_KnownEncoding=python310.PyCodec_KnownEncoding - PyCodec_LookupError=python310.PyCodec_LookupError - PyCodec_NameReplaceErrors=python310.PyCodec_NameReplaceErrors - PyCodec_Register=python310.PyCodec_Register - PyCodec_RegisterError=python310.PyCodec_RegisterError - PyCodec_ReplaceErrors=python310.PyCodec_ReplaceErrors - PyCodec_StreamReader=python310.PyCodec_StreamReader - PyCodec_StreamWriter=python310.PyCodec_StreamWriter - PyCodec_StrictErrors=python310.PyCodec_StrictErrors - PyCodec_XMLCharRefReplaceErrors=python310.PyCodec_XMLCharRefReplaceErrors - PyComplex_FromDoubles=python310.PyComplex_FromDoubles - PyComplex_ImagAsDouble=python310.PyComplex_ImagAsDouble - PyComplex_RealAsDouble=python310.PyComplex_RealAsDouble - PyComplex_Type=python310.PyComplex_Type DATA - PyDescr_NewClassMethod=python310.PyDescr_NewClassMethod - PyDescr_NewGetSet=python310.PyDescr_NewGetSet - PyDescr_NewMember=python310.PyDescr_NewMember - PyDescr_NewMethod=python310.PyDescr_NewMethod - PyDictItems_Type=python310.PyDictItems_Type DATA - PyDictIterItem_Type=python310.PyDictIterItem_Type DATA - PyDictIterKey_Type=python310.PyDictIterKey_Type DATA - PyDictIterValue_Type=python310.PyDictIterValue_Type DATA - PyDictKeys_Type=python310.PyDictKeys_Type DATA - PyDictProxy_New=python310.PyDictProxy_New - PyDictProxy_Type=python310.PyDictProxy_Type DATA - PyDictValues_Type=python310.PyDictValues_Type DATA - PyDict_Clear=python310.PyDict_Clear - PyDict_Contains=python310.PyDict_Contains - PyDict_Copy=python310.PyDict_Copy - PyDict_DelItem=python310.PyDict_DelItem - PyDict_DelItemString=python310.PyDict_DelItemString - PyDict_GetItem=python310.PyDict_GetItem - PyDict_GetItemString=python310.PyDict_GetItemString - PyDict_GetItemWithError=python310.PyDict_GetItemWithError - PyDict_Items=python310.PyDict_Items - PyDict_Keys=python310.PyDict_Keys - PyDict_Merge=python310.PyDict_Merge - PyDict_MergeFromSeq2=python310.PyDict_MergeFromSeq2 - PyDict_New=python310.PyDict_New - PyDict_Next=python310.PyDict_Next - PyDict_SetItem=python310.PyDict_SetItem - PyDict_SetItemString=python310.PyDict_SetItemString - PyDict_Size=python310.PyDict_Size - PyDict_Type=python310.PyDict_Type DATA - PyDict_Update=python310.PyDict_Update - PyDict_Values=python310.PyDict_Values - PyEllipsis_Type=python310.PyEllipsis_Type DATA - PyEnum_Type=python310.PyEnum_Type DATA - PyErr_BadArgument=python310.PyErr_BadArgument - PyErr_BadInternalCall=python310.PyErr_BadInternalCall - PyErr_CheckSignals=python310.PyErr_CheckSignals - PyErr_Clear=python310.PyErr_Clear - PyErr_Display=python310.PyErr_Display - PyErr_ExceptionMatches=python310.PyErr_ExceptionMatches - PyErr_Fetch=python310.PyErr_Fetch - PyErr_Format=python310.PyErr_Format - PyErr_FormatV=python310.PyErr_FormatV - PyErr_GetExcInfo=python310.PyErr_GetExcInfo - PyErr_GivenExceptionMatches=python310.PyErr_GivenExceptionMatches - PyErr_NewException=python310.PyErr_NewException - 
PyErr_NewExceptionWithDoc=python310.PyErr_NewExceptionWithDoc - PyErr_NoMemory=python310.PyErr_NoMemory - PyErr_NormalizeException=python310.PyErr_NormalizeException - PyErr_Occurred=python310.PyErr_Occurred - PyErr_Print=python310.PyErr_Print - PyErr_PrintEx=python310.PyErr_PrintEx - PyErr_ProgramText=python310.PyErr_ProgramText - PyErr_ResourceWarning=python310.PyErr_ResourceWarning - PyErr_Restore=python310.PyErr_Restore - PyErr_SetExcFromWindowsErr=python310.PyErr_SetExcFromWindowsErr - PyErr_SetExcFromWindowsErrWithFilename=python310.PyErr_SetExcFromWindowsErrWithFilename - PyErr_SetExcFromWindowsErrWithFilenameObject=python310.PyErr_SetExcFromWindowsErrWithFilenameObject - PyErr_SetExcFromWindowsErrWithFilenameObjects=python310.PyErr_SetExcFromWindowsErrWithFilenameObjects - PyErr_SetExcInfo=python310.PyErr_SetExcInfo - PyErr_SetFromErrno=python310.PyErr_SetFromErrno - PyErr_SetFromErrnoWithFilename=python310.PyErr_SetFromErrnoWithFilename - PyErr_SetFromErrnoWithFilenameObject=python310.PyErr_SetFromErrnoWithFilenameObject - PyErr_SetFromErrnoWithFilenameObjects=python310.PyErr_SetFromErrnoWithFilenameObjects - PyErr_SetFromWindowsErr=python310.PyErr_SetFromWindowsErr - PyErr_SetFromWindowsErrWithFilename=python310.PyErr_SetFromWindowsErrWithFilename - PyErr_SetImportError=python310.PyErr_SetImportError - PyErr_SetImportErrorSubclass=python310.PyErr_SetImportErrorSubclass - PyErr_SetInterrupt=python310.PyErr_SetInterrupt - PyErr_SetNone=python310.PyErr_SetNone - PyErr_SetObject=python310.PyErr_SetObject - PyErr_SetString=python310.PyErr_SetString - PyErr_SyntaxLocation=python310.PyErr_SyntaxLocation - PyErr_SyntaxLocationEx=python310.PyErr_SyntaxLocationEx - PyErr_WarnEx=python310.PyErr_WarnEx - PyErr_WarnExplicit=python310.PyErr_WarnExplicit - PyErr_WarnFormat=python310.PyErr_WarnFormat - PyErr_WriteUnraisable=python310.PyErr_WriteUnraisable - PyEval_AcquireLock=python310.PyEval_AcquireLock - PyEval_AcquireThread=python310.PyEval_AcquireThread - PyEval_CallFunction=python310.PyEval_CallFunction - PyEval_CallMethod=python310.PyEval_CallMethod - PyEval_CallObjectWithKeywords=python310.PyEval_CallObjectWithKeywords - PyEval_EvalCode=python310.PyEval_EvalCode - PyEval_EvalCodeEx=python310.PyEval_EvalCodeEx - PyEval_EvalFrame=python310.PyEval_EvalFrame - PyEval_EvalFrameEx=python310.PyEval_EvalFrameEx - PyEval_GetBuiltins=python310.PyEval_GetBuiltins - PyEval_GetCallStats=python310.PyEval_GetCallStats - PyEval_GetFrame=python310.PyEval_GetFrame - PyEval_GetFuncDesc=python310.PyEval_GetFuncDesc - PyEval_GetFuncName=python310.PyEval_GetFuncName - PyEval_GetGlobals=python310.PyEval_GetGlobals - PyEval_GetLocals=python310.PyEval_GetLocals - PyEval_InitThreads=python310.PyEval_InitThreads - PyEval_ReInitThreads=python310.PyEval_ReInitThreads - PyEval_ReleaseLock=python310.PyEval_ReleaseLock - PyEval_ReleaseThread=python310.PyEval_ReleaseThread - PyEval_RestoreThread=python310.PyEval_RestoreThread - PyEval_SaveThread=python310.PyEval_SaveThread - PyEval_ThreadsInitialized=python310.PyEval_ThreadsInitialized - PyExc_ArithmeticError=python310.PyExc_ArithmeticError DATA - PyExc_AssertionError=python310.PyExc_AssertionError DATA - PyExc_AttributeError=python310.PyExc_AttributeError DATA - PyExc_BaseException=python310.PyExc_BaseException DATA - PyExc_BlockingIOError=python310.PyExc_BlockingIOError DATA - PyExc_BrokenPipeError=python310.PyExc_BrokenPipeError DATA - PyExc_BufferError=python310.PyExc_BufferError DATA - PyExc_BytesWarning=python310.PyExc_BytesWarning DATA - 
PyExc_ChildProcessError=python310.PyExc_ChildProcessError DATA - PyExc_ConnectionAbortedError=python310.PyExc_ConnectionAbortedError DATA - PyExc_ConnectionError=python310.PyExc_ConnectionError DATA - PyExc_ConnectionRefusedError=python310.PyExc_ConnectionRefusedError DATA - PyExc_ConnectionResetError=python310.PyExc_ConnectionResetError DATA - PyExc_DeprecationWarning=python310.PyExc_DeprecationWarning DATA - PyExc_EOFError=python310.PyExc_EOFError DATA - PyExc_EnvironmentError=python310.PyExc_EnvironmentError DATA - PyExc_Exception=python310.PyExc_Exception DATA - PyExc_FileExistsError=python310.PyExc_FileExistsError DATA - PyExc_FileNotFoundError=python310.PyExc_FileNotFoundError DATA - PyExc_FloatingPointError=python310.PyExc_FloatingPointError DATA - PyExc_FutureWarning=python310.PyExc_FutureWarning DATA - PyExc_GeneratorExit=python310.PyExc_GeneratorExit DATA - PyExc_IOError=python310.PyExc_IOError DATA - PyExc_ImportError=python310.PyExc_ImportError DATA - PyExc_ImportWarning=python310.PyExc_ImportWarning DATA - PyExc_IndentationError=python310.PyExc_IndentationError DATA - PyExc_IndexError=python310.PyExc_IndexError DATA - PyExc_InterruptedError=python310.PyExc_InterruptedError DATA - PyExc_IsADirectoryError=python310.PyExc_IsADirectoryError DATA - PyExc_KeyError=python310.PyExc_KeyError DATA - PyExc_KeyboardInterrupt=python310.PyExc_KeyboardInterrupt DATA - PyExc_LookupError=python310.PyExc_LookupError DATA - PyExc_MemoryError=python310.PyExc_MemoryError DATA - PyExc_ModuleNotFoundError=python310.PyExc_ModuleNotFoundError DATA - PyExc_NameError=python310.PyExc_NameError DATA - PyExc_NotADirectoryError=python310.PyExc_NotADirectoryError DATA - PyExc_NotImplementedError=python310.PyExc_NotImplementedError DATA - PyExc_OSError=python310.PyExc_OSError DATA - PyExc_OverflowError=python310.PyExc_OverflowError DATA - PyExc_PendingDeprecationWarning=python310.PyExc_PendingDeprecationWarning DATA - PyExc_PermissionError=python310.PyExc_PermissionError DATA - PyExc_ProcessLookupError=python310.PyExc_ProcessLookupError DATA - PyExc_RecursionError=python310.PyExc_RecursionError DATA - PyExc_ReferenceError=python310.PyExc_ReferenceError DATA - PyExc_ResourceWarning=python310.PyExc_ResourceWarning DATA - PyExc_RuntimeError=python310.PyExc_RuntimeError DATA - PyExc_RuntimeWarning=python310.PyExc_RuntimeWarning DATA - PyExc_StopAsyncIteration=python310.PyExc_StopAsyncIteration DATA - PyExc_StopIteration=python310.PyExc_StopIteration DATA - PyExc_SyntaxError=python310.PyExc_SyntaxError DATA - PyExc_SyntaxWarning=python310.PyExc_SyntaxWarning DATA - PyExc_SystemError=python310.PyExc_SystemError DATA - PyExc_SystemExit=python310.PyExc_SystemExit DATA - PyExc_TabError=python310.PyExc_TabError DATA - PyExc_TimeoutError=python310.PyExc_TimeoutError DATA - PyExc_TypeError=python310.PyExc_TypeError DATA - PyExc_UnboundLocalError=python310.PyExc_UnboundLocalError DATA - PyExc_UnicodeDecodeError=python310.PyExc_UnicodeDecodeError DATA - PyExc_UnicodeEncodeError=python310.PyExc_UnicodeEncodeError DATA - PyExc_UnicodeError=python310.PyExc_UnicodeError DATA - PyExc_UnicodeTranslateError=python310.PyExc_UnicodeTranslateError DATA - PyExc_UnicodeWarning=python310.PyExc_UnicodeWarning DATA - PyExc_UserWarning=python310.PyExc_UserWarning DATA - PyExc_ValueError=python310.PyExc_ValueError DATA - PyExc_Warning=python310.PyExc_Warning DATA - PyExc_WindowsError=python310.PyExc_WindowsError DATA - PyExc_ZeroDivisionError=python310.PyExc_ZeroDivisionError DATA - PyExceptionClass_Name=python310.PyExceptionClass_Name - 
PyException_GetCause=python310.PyException_GetCause - PyException_GetContext=python310.PyException_GetContext - PyException_GetTraceback=python310.PyException_GetTraceback - PyException_SetCause=python310.PyException_SetCause - PyException_SetContext=python310.PyException_SetContext - PyException_SetTraceback=python310.PyException_SetTraceback - PyFile_FromFd=python310.PyFile_FromFd - PyFile_GetLine=python310.PyFile_GetLine - PyFile_WriteObject=python310.PyFile_WriteObject - PyFile_WriteString=python310.PyFile_WriteString - PyFilter_Type=python310.PyFilter_Type DATA - PyFloat_AsDouble=python310.PyFloat_AsDouble - PyFloat_FromDouble=python310.PyFloat_FromDouble - PyFloat_FromString=python310.PyFloat_FromString - PyFloat_GetInfo=python310.PyFloat_GetInfo - PyFloat_GetMax=python310.PyFloat_GetMax - PyFloat_GetMin=python310.PyFloat_GetMin - PyFloat_Type=python310.PyFloat_Type DATA - PyFrozenSet_New=python310.PyFrozenSet_New - PyFrozenSet_Type=python310.PyFrozenSet_Type DATA - PyGC_Collect=python310.PyGC_Collect - PyGILState_Ensure=python310.PyGILState_Ensure - PyGILState_GetThisThreadState=python310.PyGILState_GetThisThreadState - PyGILState_Release=python310.PyGILState_Release - PyGetSetDescr_Type=python310.PyGetSetDescr_Type DATA - PyImport_AddModule=python310.PyImport_AddModule - PyImport_AddModuleObject=python310.PyImport_AddModuleObject - PyImport_AppendInittab=python310.PyImport_AppendInittab - PyImport_Cleanup=python310.PyImport_Cleanup - PyImport_ExecCodeModule=python310.PyImport_ExecCodeModule - PyImport_ExecCodeModuleEx=python310.PyImport_ExecCodeModuleEx - PyImport_ExecCodeModuleObject=python310.PyImport_ExecCodeModuleObject - PyImport_ExecCodeModuleWithPathnames=python310.PyImport_ExecCodeModuleWithPathnames - PyImport_GetImporter=python310.PyImport_GetImporter - PyImport_GetMagicNumber=python310.PyImport_GetMagicNumber - PyImport_GetMagicTag=python310.PyImport_GetMagicTag - PyImport_GetModule=python310.PyImport_GetModule - PyImport_GetModuleDict=python310.PyImport_GetModuleDict - PyImport_Import=python310.PyImport_Import - PyImport_ImportFrozenModule=python310.PyImport_ImportFrozenModule - PyImport_ImportFrozenModuleObject=python310.PyImport_ImportFrozenModuleObject - PyImport_ImportModule=python310.PyImport_ImportModule - PyImport_ImportModuleLevel=python310.PyImport_ImportModuleLevel - PyImport_ImportModuleLevelObject=python310.PyImport_ImportModuleLevelObject - PyImport_ImportModuleNoBlock=python310.PyImport_ImportModuleNoBlock - PyImport_ReloadModule=python310.PyImport_ReloadModule - PyIndex_Check=python310.PyIndex_Check - PyInterpreterState_Clear=python310.PyInterpreterState_Clear - PyInterpreterState_Delete=python310.PyInterpreterState_Delete - PyInterpreterState_New=python310.PyInterpreterState_New - PyIter_Check=python310.PyIter_Check - PyIter_Next=python310.PyIter_Next - PyListIter_Type=python310.PyListIter_Type DATA - PyListRevIter_Type=python310.PyListRevIter_Type DATA - PyList_Append=python310.PyList_Append - PyList_AsTuple=python310.PyList_AsTuple - PyList_GetItem=python310.PyList_GetItem - PyList_GetSlice=python310.PyList_GetSlice - PyList_Insert=python310.PyList_Insert - PyList_New=python310.PyList_New - PyList_Reverse=python310.PyList_Reverse - PyList_SetItem=python310.PyList_SetItem - PyList_SetSlice=python310.PyList_SetSlice - PyList_Size=python310.PyList_Size - PyList_Sort=python310.PyList_Sort - PyList_Type=python310.PyList_Type DATA - PyLongRangeIter_Type=python310.PyLongRangeIter_Type DATA - PyLong_AsDouble=python310.PyLong_AsDouble - 
PyLong_AsLong=python310.PyLong_AsLong - PyLong_AsLongAndOverflow=python310.PyLong_AsLongAndOverflow - PyLong_AsLongLong=python310.PyLong_AsLongLong - PyLong_AsLongLongAndOverflow=python310.PyLong_AsLongLongAndOverflow - PyLong_AsSize_t=python310.PyLong_AsSize_t - PyLong_AsSsize_t=python310.PyLong_AsSsize_t - PyLong_AsUnsignedLong=python310.PyLong_AsUnsignedLong - PyLong_AsUnsignedLongLong=python310.PyLong_AsUnsignedLongLong - PyLong_AsUnsignedLongLongMask=python310.PyLong_AsUnsignedLongLongMask - PyLong_AsUnsignedLongMask=python310.PyLong_AsUnsignedLongMask - PyLong_AsVoidPtr=python310.PyLong_AsVoidPtr - PyLong_FromDouble=python310.PyLong_FromDouble - PyLong_FromLong=python310.PyLong_FromLong - PyLong_FromLongLong=python310.PyLong_FromLongLong - PyLong_FromSize_t=python310.PyLong_FromSize_t - PyLong_FromSsize_t=python310.PyLong_FromSsize_t - PyLong_FromString=python310.PyLong_FromString - PyLong_FromUnsignedLong=python310.PyLong_FromUnsignedLong - PyLong_FromUnsignedLongLong=python310.PyLong_FromUnsignedLongLong - PyLong_FromVoidPtr=python310.PyLong_FromVoidPtr - PyLong_GetInfo=python310.PyLong_GetInfo - PyLong_Type=python310.PyLong_Type DATA - PyMap_Type=python310.PyMap_Type DATA - PyMapping_Check=python310.PyMapping_Check - PyMapping_GetItemString=python310.PyMapping_GetItemString - PyMapping_HasKey=python310.PyMapping_HasKey - PyMapping_HasKeyString=python310.PyMapping_HasKeyString - PyMapping_Items=python310.PyMapping_Items - PyMapping_Keys=python310.PyMapping_Keys - PyMapping_Length=python310.PyMapping_Length - PyMapping_SetItemString=python310.PyMapping_SetItemString - PyMapping_Size=python310.PyMapping_Size - PyMapping_Values=python310.PyMapping_Values - PyMem_Calloc=python310.PyMem_Calloc - PyMem_Free=python310.PyMem_Free - PyMem_Malloc=python310.PyMem_Malloc - PyMem_Realloc=python310.PyMem_Realloc - PyMemberDescr_Type=python310.PyMemberDescr_Type DATA - PyMemoryView_FromMemory=python310.PyMemoryView_FromMemory - PyMemoryView_FromObject=python310.PyMemoryView_FromObject - PyMemoryView_GetContiguous=python310.PyMemoryView_GetContiguous - PyMemoryView_Type=python310.PyMemoryView_Type DATA - PyMethodDescr_Type=python310.PyMethodDescr_Type DATA - PyModuleDef_Init=python310.PyModuleDef_Init - PyModuleDef_Type=python310.PyModuleDef_Type DATA - PyModule_AddFunctions=python310.PyModule_AddFunctions - PyModule_AddIntConstant=python310.PyModule_AddIntConstant - PyModule_AddObject=python310.PyModule_AddObject - PyModule_AddStringConstant=python310.PyModule_AddStringConstant - PyModule_Create2=python310.PyModule_Create2 - PyModule_ExecDef=python310.PyModule_ExecDef - PyModule_FromDefAndSpec2=python310.PyModule_FromDefAndSpec2 - PyModule_GetDef=python310.PyModule_GetDef - PyModule_GetDict=python310.PyModule_GetDict - PyModule_GetFilename=python310.PyModule_GetFilename - PyModule_GetFilenameObject=python310.PyModule_GetFilenameObject - PyModule_GetName=python310.PyModule_GetName - PyModule_GetNameObject=python310.PyModule_GetNameObject - PyModule_GetState=python310.PyModule_GetState - PyModule_New=python310.PyModule_New - PyModule_NewObject=python310.PyModule_NewObject - PyModule_SetDocString=python310.PyModule_SetDocString - PyModule_Type=python310.PyModule_Type DATA - PyNullImporter_Type=python310.PyNullImporter_Type DATA - PyNumber_Absolute=python310.PyNumber_Absolute - PyNumber_Add=python310.PyNumber_Add - PyNumber_And=python310.PyNumber_And - PyNumber_AsSsize_t=python310.PyNumber_AsSsize_t - PyNumber_Check=python310.PyNumber_Check - PyNumber_Divmod=python310.PyNumber_Divmod - 
PyNumber_Float=python310.PyNumber_Float - PyNumber_FloorDivide=python310.PyNumber_FloorDivide - PyNumber_InPlaceAdd=python310.PyNumber_InPlaceAdd - PyNumber_InPlaceAnd=python310.PyNumber_InPlaceAnd - PyNumber_InPlaceFloorDivide=python310.PyNumber_InPlaceFloorDivide - PyNumber_InPlaceLshift=python310.PyNumber_InPlaceLshift - PyNumber_InPlaceMatrixMultiply=python310.PyNumber_InPlaceMatrixMultiply - PyNumber_InPlaceMultiply=python310.PyNumber_InPlaceMultiply - PyNumber_InPlaceOr=python310.PyNumber_InPlaceOr - PyNumber_InPlacePower=python310.PyNumber_InPlacePower - PyNumber_InPlaceRemainder=python310.PyNumber_InPlaceRemainder - PyNumber_InPlaceRshift=python310.PyNumber_InPlaceRshift - PyNumber_InPlaceSubtract=python310.PyNumber_InPlaceSubtract - PyNumber_InPlaceTrueDivide=python310.PyNumber_InPlaceTrueDivide - PyNumber_InPlaceXor=python310.PyNumber_InPlaceXor - PyNumber_Index=python310.PyNumber_Index - PyNumber_Invert=python310.PyNumber_Invert - PyNumber_Long=python310.PyNumber_Long - PyNumber_Lshift=python310.PyNumber_Lshift - PyNumber_MatrixMultiply=python310.PyNumber_MatrixMultiply - PyNumber_Multiply=python310.PyNumber_Multiply - PyNumber_Negative=python310.PyNumber_Negative - PyNumber_Or=python310.PyNumber_Or - PyNumber_Positive=python310.PyNumber_Positive - PyNumber_Power=python310.PyNumber_Power - PyNumber_Remainder=python310.PyNumber_Remainder - PyNumber_Rshift=python310.PyNumber_Rshift - PyNumber_Subtract=python310.PyNumber_Subtract - PyNumber_ToBase=python310.PyNumber_ToBase - PyNumber_TrueDivide=python310.PyNumber_TrueDivide - PyNumber_Xor=python310.PyNumber_Xor - PyODictItems_Type=python310.PyODictItems_Type DATA - PyODictIter_Type=python310.PyODictIter_Type DATA - PyODictKeys_Type=python310.PyODictKeys_Type DATA - PyODictValues_Type=python310.PyODictValues_Type DATA - PyODict_DelItem=python310.PyODict_DelItem - PyODict_New=python310.PyODict_New - PyODict_SetItem=python310.PyODict_SetItem - PyODict_Type=python310.PyODict_Type DATA - PyOS_AfterFork=python310.PyOS_AfterFork - PyOS_CheckStack=python310.PyOS_CheckStack - PyOS_FSPath=python310.PyOS_FSPath - PyOS_InitInterrupts=python310.PyOS_InitInterrupts - PyOS_InputHook=python310.PyOS_InputHook DATA - PyOS_InterruptOccurred=python310.PyOS_InterruptOccurred - PyOS_ReadlineFunctionPointer=python310.PyOS_ReadlineFunctionPointer DATA - PyOS_double_to_string=python310.PyOS_double_to_string - PyOS_getsig=python310.PyOS_getsig - PyOS_mystricmp=python310.PyOS_mystricmp - PyOS_mystrnicmp=python310.PyOS_mystrnicmp - PyOS_setsig=python310.PyOS_setsig - PyOS_snprintf=python310.PyOS_snprintf - PyOS_string_to_double=python310.PyOS_string_to_double - PyOS_strtol=python310.PyOS_strtol - PyOS_strtoul=python310.PyOS_strtoul - PyOS_vsnprintf=python310.PyOS_vsnprintf - PyObject_ASCII=python310.PyObject_ASCII - PyObject_AsCharBuffer=python310.PyObject_AsCharBuffer - PyObject_AsFileDescriptor=python310.PyObject_AsFileDescriptor - PyObject_AsReadBuffer=python310.PyObject_AsReadBuffer - PyObject_AsWriteBuffer=python310.PyObject_AsWriteBuffer - PyObject_Bytes=python310.PyObject_Bytes - PyObject_Call=python310.PyObject_Call - PyObject_CallFunction=python310.PyObject_CallFunction - PyObject_CallFunctionObjArgs=python310.PyObject_CallFunctionObjArgs - PyObject_CallMethod=python310.PyObject_CallMethod - PyObject_CallMethodObjArgs=python310.PyObject_CallMethodObjArgs - PyObject_CallObject=python310.PyObject_CallObject - PyObject_Calloc=python310.PyObject_Calloc - PyObject_CheckReadBuffer=python310.PyObject_CheckReadBuffer - 
PyObject_ClearWeakRefs=python310.PyObject_ClearWeakRefs - PyObject_DelItem=python310.PyObject_DelItem - PyObject_DelItemString=python310.PyObject_DelItemString - PyObject_Dir=python310.PyObject_Dir - PyObject_Format=python310.PyObject_Format - PyObject_Free=python310.PyObject_Free - PyObject_GC_Del=python310.PyObject_GC_Del - PyObject_GC_Track=python310.PyObject_GC_Track - PyObject_GC_UnTrack=python310.PyObject_GC_UnTrack - PyObject_GenericGetAttr=python310.PyObject_GenericGetAttr - PyObject_GenericSetAttr=python310.PyObject_GenericSetAttr - PyObject_GenericSetDict=python310.PyObject_GenericSetDict - PyObject_GetAttr=python310.PyObject_GetAttr - PyObject_GetAttrString=python310.PyObject_GetAttrString - PyObject_GetItem=python310.PyObject_GetItem - PyObject_GetIter=python310.PyObject_GetIter - PyObject_HasAttr=python310.PyObject_HasAttr - PyObject_HasAttrString=python310.PyObject_HasAttrString - PyObject_Hash=python310.PyObject_Hash - PyObject_HashNotImplemented=python310.PyObject_HashNotImplemented - PyObject_Init=python310.PyObject_Init - PyObject_InitVar=python310.PyObject_InitVar - PyObject_IsInstance=python310.PyObject_IsInstance - PyObject_IsSubclass=python310.PyObject_IsSubclass - PyObject_IsTrue=python310.PyObject_IsTrue - PyObject_Length=python310.PyObject_Length - PyObject_Malloc=python310.PyObject_Malloc - PyObject_Not=python310.PyObject_Not - PyObject_Realloc=python310.PyObject_Realloc - PyObject_Repr=python310.PyObject_Repr - PyObject_RichCompare=python310.PyObject_RichCompare - PyObject_RichCompareBool=python310.PyObject_RichCompareBool - PyObject_SelfIter=python310.PyObject_SelfIter - PyObject_SetAttr=python310.PyObject_SetAttr - PyObject_SetAttrString=python310.PyObject_SetAttrString - PyObject_SetItem=python310.PyObject_SetItem - PyObject_Size=python310.PyObject_Size - PyObject_Str=python310.PyObject_Str - PyObject_Type=python310.PyObject_Type - PyParser_SimpleParseFileFlags=python310.PyParser_SimpleParseFileFlags - PyParser_SimpleParseStringFlags=python310.PyParser_SimpleParseStringFlags - PyParser_SimpleParseStringFlagsFilename=python310.PyParser_SimpleParseStringFlagsFilename - PyProperty_Type=python310.PyProperty_Type DATA - PyRangeIter_Type=python310.PyRangeIter_Type DATA - PyRange_Type=python310.PyRange_Type DATA - PyReversed_Type=python310.PyReversed_Type DATA - PySeqIter_New=python310.PySeqIter_New - PySeqIter_Type=python310.PySeqIter_Type DATA - PySequence_Check=python310.PySequence_Check - PySequence_Concat=python310.PySequence_Concat - PySequence_Contains=python310.PySequence_Contains - PySequence_Count=python310.PySequence_Count - PySequence_DelItem=python310.PySequence_DelItem - PySequence_DelSlice=python310.PySequence_DelSlice - PySequence_Fast=python310.PySequence_Fast - PySequence_GetItem=python310.PySequence_GetItem - PySequence_GetSlice=python310.PySequence_GetSlice - PySequence_In=python310.PySequence_In - PySequence_InPlaceConcat=python310.PySequence_InPlaceConcat - PySequence_InPlaceRepeat=python310.PySequence_InPlaceRepeat - PySequence_Index=python310.PySequence_Index - PySequence_Length=python310.PySequence_Length - PySequence_List=python310.PySequence_List - PySequence_Repeat=python310.PySequence_Repeat - PySequence_SetItem=python310.PySequence_SetItem - PySequence_SetSlice=python310.PySequence_SetSlice - PySequence_Size=python310.PySequence_Size - PySequence_Tuple=python310.PySequence_Tuple - PySetIter_Type=python310.PySetIter_Type DATA - PySet_Add=python310.PySet_Add - PySet_Clear=python310.PySet_Clear - PySet_Contains=python310.PySet_Contains - 
PySet_Discard=python310.PySet_Discard - PySet_New=python310.PySet_New - PySet_Pop=python310.PySet_Pop - PySet_Size=python310.PySet_Size - PySet_Type=python310.PySet_Type DATA - PySlice_AdjustIndices=python310.PySlice_AdjustIndices - PySlice_GetIndices=python310.PySlice_GetIndices - PySlice_GetIndicesEx=python310.PySlice_GetIndicesEx - PySlice_New=python310.PySlice_New - PySlice_Type=python310.PySlice_Type DATA - PySlice_Unpack=python310.PySlice_Unpack - PySortWrapper_Type=python310.PySortWrapper_Type DATA - PyInterpreterState_GetID=python310.PyInterpreterState_GetID - PyState_AddModule=python310.PyState_AddModule - PyState_FindModule=python310.PyState_FindModule - PyState_RemoveModule=python310.PyState_RemoveModule - PyStructSequence_GetItem=python310.PyStructSequence_GetItem - PyStructSequence_New=python310.PyStructSequence_New - PyStructSequence_NewType=python310.PyStructSequence_NewType - PyStructSequence_SetItem=python310.PyStructSequence_SetItem - PySuper_Type=python310.PySuper_Type DATA - PySys_AddWarnOption=python310.PySys_AddWarnOption - PySys_AddWarnOptionUnicode=python310.PySys_AddWarnOptionUnicode - PySys_AddXOption=python310.PySys_AddXOption - PySys_FormatStderr=python310.PySys_FormatStderr - PySys_FormatStdout=python310.PySys_FormatStdout - PySys_GetObject=python310.PySys_GetObject - PySys_GetXOptions=python310.PySys_GetXOptions - PySys_HasWarnOptions=python310.PySys_HasWarnOptions - PySys_ResetWarnOptions=python310.PySys_ResetWarnOptions - PySys_SetArgv=python310.PySys_SetArgv - PySys_SetArgvEx=python310.PySys_SetArgvEx - PySys_SetObject=python310.PySys_SetObject - PySys_SetPath=python310.PySys_SetPath - PySys_WriteStderr=python310.PySys_WriteStderr - PySys_WriteStdout=python310.PySys_WriteStdout - PyThreadState_Clear=python310.PyThreadState_Clear - PyThreadState_Delete=python310.PyThreadState_Delete - PyThreadState_DeleteCurrent=python310.PyThreadState_DeleteCurrent - PyThreadState_Get=python310.PyThreadState_Get - PyThreadState_GetDict=python310.PyThreadState_GetDict - PyThreadState_New=python310.PyThreadState_New - PyThreadState_SetAsyncExc=python310.PyThreadState_SetAsyncExc - PyThreadState_Swap=python310.PyThreadState_Swap - PyThread_tss_alloc=python310.PyThread_tss_alloc - PyThread_tss_create=python310.PyThread_tss_create - PyThread_tss_delete=python310.PyThread_tss_delete - PyThread_tss_free=python310.PyThread_tss_free - PyThread_tss_get=python310.PyThread_tss_get - PyThread_tss_is_created=python310.PyThread_tss_is_created - PyThread_tss_set=python310.PyThread_tss_set - PyTraceBack_Here=python310.PyTraceBack_Here - PyTraceBack_Print=python310.PyTraceBack_Print - PyTraceBack_Type=python310.PyTraceBack_Type DATA - PyTupleIter_Type=python310.PyTupleIter_Type DATA - PyTuple_GetItem=python310.PyTuple_GetItem - PyTuple_GetSlice=python310.PyTuple_GetSlice - PyTuple_New=python310.PyTuple_New - PyTuple_Pack=python310.PyTuple_Pack - PyTuple_SetItem=python310.PyTuple_SetItem - PyTuple_Size=python310.PyTuple_Size - PyTuple_Type=python310.PyTuple_Type DATA - PyType_ClearCache=python310.PyType_ClearCache - PyType_FromSpec=python310.PyType_FromSpec - PyType_FromSpecWithBases=python310.PyType_FromSpecWithBases - PyType_GenericAlloc=python310.PyType_GenericAlloc - PyType_GenericNew=python310.PyType_GenericNew - PyType_GetFlags=python310.PyType_GetFlags - PyType_GetSlot=python310.PyType_GetSlot - PyType_IsSubtype=python310.PyType_IsSubtype - PyType_Modified=python310.PyType_Modified - PyType_Ready=python310.PyType_Ready - PyType_Type=python310.PyType_Type DATA - 
PyUnicodeDecodeError_Create=python310.PyUnicodeDecodeError_Create - PyUnicodeDecodeError_GetEncoding=python310.PyUnicodeDecodeError_GetEncoding - PyUnicodeDecodeError_GetEnd=python310.PyUnicodeDecodeError_GetEnd - PyUnicodeDecodeError_GetObject=python310.PyUnicodeDecodeError_GetObject - PyUnicodeDecodeError_GetReason=python310.PyUnicodeDecodeError_GetReason - PyUnicodeDecodeError_GetStart=python310.PyUnicodeDecodeError_GetStart - PyUnicodeDecodeError_SetEnd=python310.PyUnicodeDecodeError_SetEnd - PyUnicodeDecodeError_SetReason=python310.PyUnicodeDecodeError_SetReason - PyUnicodeDecodeError_SetStart=python310.PyUnicodeDecodeError_SetStart - PyUnicodeEncodeError_GetEncoding=python310.PyUnicodeEncodeError_GetEncoding - PyUnicodeEncodeError_GetEnd=python310.PyUnicodeEncodeError_GetEnd - PyUnicodeEncodeError_GetObject=python310.PyUnicodeEncodeError_GetObject - PyUnicodeEncodeError_GetReason=python310.PyUnicodeEncodeError_GetReason - PyUnicodeEncodeError_GetStart=python310.PyUnicodeEncodeError_GetStart - PyUnicodeEncodeError_SetEnd=python310.PyUnicodeEncodeError_SetEnd - PyUnicodeEncodeError_SetReason=python310.PyUnicodeEncodeError_SetReason - PyUnicodeEncodeError_SetStart=python310.PyUnicodeEncodeError_SetStart - PyUnicodeIter_Type=python310.PyUnicodeIter_Type DATA - PyUnicodeTranslateError_GetEnd=python310.PyUnicodeTranslateError_GetEnd - PyUnicodeTranslateError_GetObject=python310.PyUnicodeTranslateError_GetObject - PyUnicodeTranslateError_GetReason=python310.PyUnicodeTranslateError_GetReason - PyUnicodeTranslateError_GetStart=python310.PyUnicodeTranslateError_GetStart - PyUnicodeTranslateError_SetEnd=python310.PyUnicodeTranslateError_SetEnd - PyUnicodeTranslateError_SetReason=python310.PyUnicodeTranslateError_SetReason - PyUnicodeTranslateError_SetStart=python310.PyUnicodeTranslateError_SetStart - PyUnicode_Append=python310.PyUnicode_Append - PyUnicode_AppendAndDel=python310.PyUnicode_AppendAndDel - PyUnicode_AsASCIIString=python310.PyUnicode_AsASCIIString - PyUnicode_AsCharmapString=python310.PyUnicode_AsCharmapString - PyUnicode_AsDecodedObject=python310.PyUnicode_AsDecodedObject - PyUnicode_AsDecodedUnicode=python310.PyUnicode_AsDecodedUnicode - PyUnicode_AsEncodedObject=python310.PyUnicode_AsEncodedObject - PyUnicode_AsEncodedString=python310.PyUnicode_AsEncodedString - PyUnicode_AsEncodedUnicode=python310.PyUnicode_AsEncodedUnicode - PyUnicode_AsLatin1String=python310.PyUnicode_AsLatin1String - PyUnicode_AsMBCSString=python310.PyUnicode_AsMBCSString - PyUnicode_AsRawUnicodeEscapeString=python310.PyUnicode_AsRawUnicodeEscapeString - PyUnicode_AsUCS4=python310.PyUnicode_AsUCS4 - PyUnicode_AsUCS4Copy=python310.PyUnicode_AsUCS4Copy - PyUnicode_AsUTF16String=python310.PyUnicode_AsUTF16String - PyUnicode_AsUTF32String=python310.PyUnicode_AsUTF32String - PyUnicode_AsUTF8String=python310.PyUnicode_AsUTF8String - PyUnicode_AsUnicodeEscapeString=python310.PyUnicode_AsUnicodeEscapeString - PyUnicode_AsWideChar=python310.PyUnicode_AsWideChar - PyUnicode_AsWideCharString=python310.PyUnicode_AsWideCharString - PyUnicode_BuildEncodingMap=python310.PyUnicode_BuildEncodingMap - PyUnicode_Compare=python310.PyUnicode_Compare - PyUnicode_CompareWithASCIIString=python310.PyUnicode_CompareWithASCIIString - PyUnicode_Concat=python310.PyUnicode_Concat - PyUnicode_Contains=python310.PyUnicode_Contains - PyUnicode_Count=python310.PyUnicode_Count - PyUnicode_Decode=python310.PyUnicode_Decode - PyUnicode_DecodeASCII=python310.PyUnicode_DecodeASCII - PyUnicode_DecodeCharmap=python310.PyUnicode_DecodeCharmap - 
PyUnicode_DecodeCodePageStateful=python310.PyUnicode_DecodeCodePageStateful - PyUnicode_DecodeFSDefault=python310.PyUnicode_DecodeFSDefault - PyUnicode_DecodeFSDefaultAndSize=python310.PyUnicode_DecodeFSDefaultAndSize - PyUnicode_DecodeLatin1=python310.PyUnicode_DecodeLatin1 - PyUnicode_DecodeLocale=python310.PyUnicode_DecodeLocale - PyUnicode_DecodeLocaleAndSize=python310.PyUnicode_DecodeLocaleAndSize - PyUnicode_DecodeMBCS=python310.PyUnicode_DecodeMBCS - PyUnicode_DecodeMBCSStateful=python310.PyUnicode_DecodeMBCSStateful - PyUnicode_DecodeRawUnicodeEscape=python310.PyUnicode_DecodeRawUnicodeEscape - PyUnicode_DecodeUTF16=python310.PyUnicode_DecodeUTF16 - PyUnicode_DecodeUTF16Stateful=python310.PyUnicode_DecodeUTF16Stateful - PyUnicode_DecodeUTF32=python310.PyUnicode_DecodeUTF32 - PyUnicode_DecodeUTF32Stateful=python310.PyUnicode_DecodeUTF32Stateful - PyUnicode_DecodeUTF7=python310.PyUnicode_DecodeUTF7 - PyUnicode_DecodeUTF7Stateful=python310.PyUnicode_DecodeUTF7Stateful - PyUnicode_DecodeUTF8=python310.PyUnicode_DecodeUTF8 - PyUnicode_DecodeUTF8Stateful=python310.PyUnicode_DecodeUTF8Stateful - PyUnicode_DecodeUnicodeEscape=python310.PyUnicode_DecodeUnicodeEscape - PyUnicode_EncodeCodePage=python310.PyUnicode_EncodeCodePage - PyUnicode_EncodeFSDefault=python310.PyUnicode_EncodeFSDefault - PyUnicode_EncodeLocale=python310.PyUnicode_EncodeLocale - PyUnicode_FSConverter=python310.PyUnicode_FSConverter - PyUnicode_FSDecoder=python310.PyUnicode_FSDecoder - PyUnicode_Find=python310.PyUnicode_Find - PyUnicode_FindChar=python310.PyUnicode_FindChar - PyUnicode_Format=python310.PyUnicode_Format - PyUnicode_FromEncodedObject=python310.PyUnicode_FromEncodedObject - PyUnicode_FromFormat=python310.PyUnicode_FromFormat - PyUnicode_FromFormatV=python310.PyUnicode_FromFormatV - PyUnicode_FromObject=python310.PyUnicode_FromObject - PyUnicode_FromOrdinal=python310.PyUnicode_FromOrdinal - PyUnicode_FromString=python310.PyUnicode_FromString - PyUnicode_FromStringAndSize=python310.PyUnicode_FromStringAndSize - PyUnicode_FromWideChar=python310.PyUnicode_FromWideChar - PyUnicode_GetDefaultEncoding=python310.PyUnicode_GetDefaultEncoding - PyUnicode_GetLength=python310.PyUnicode_GetLength - PyUnicode_GetSize=python310.PyUnicode_GetSize - PyUnicode_InternFromString=python310.PyUnicode_InternFromString - PyUnicode_InternImmortal=python310.PyUnicode_InternImmortal - PyUnicode_InternInPlace=python310.PyUnicode_InternInPlace - PyUnicode_IsIdentifier=python310.PyUnicode_IsIdentifier - PyUnicode_Join=python310.PyUnicode_Join - PyUnicode_Partition=python310.PyUnicode_Partition - PyUnicode_RPartition=python310.PyUnicode_RPartition - PyUnicode_RSplit=python310.PyUnicode_RSplit - PyUnicode_ReadChar=python310.PyUnicode_ReadChar - PyUnicode_Replace=python310.PyUnicode_Replace - PyUnicode_Resize=python310.PyUnicode_Resize - PyUnicode_RichCompare=python310.PyUnicode_RichCompare - PyUnicode_Split=python310.PyUnicode_Split - PyUnicode_Splitlines=python310.PyUnicode_Splitlines - PyUnicode_Substring=python310.PyUnicode_Substring - PyUnicode_Tailmatch=python310.PyUnicode_Tailmatch - PyUnicode_Translate=python310.PyUnicode_Translate - PyUnicode_Type=python310.PyUnicode_Type DATA - PyUnicode_WriteChar=python310.PyUnicode_WriteChar - PyWeakref_GetObject=python310.PyWeakref_GetObject - PyWeakref_NewProxy=python310.PyWeakref_NewProxy - PyWeakref_NewRef=python310.PyWeakref_NewRef - PyWrapperDescr_Type=python310.PyWrapperDescr_Type DATA - PyWrapper_New=python310.PyWrapper_New - PyZip_Type=python310.PyZip_Type DATA - 
Py_AddPendingCall=python310.Py_AddPendingCall - Py_AtExit=python310.Py_AtExit - Py_BuildValue=python310.Py_BuildValue - Py_CompileString=python310.Py_CompileString - Py_DecRef=python310.Py_DecRef - Py_DecodeLocale=python310.Py_DecodeLocale - Py_EncodeLocale=python310.Py_EncodeLocale - Py_EndInterpreter=python310.Py_EndInterpreter - Py_EnterRecursiveCall=python310.Py_EnterRecursiveCall - Py_Exit=python310.Py_Exit - Py_FatalError=python310.Py_FatalError - Py_FileSystemDefaultEncodeErrors=python310.Py_FileSystemDefaultEncodeErrors DATA - Py_FileSystemDefaultEncoding=python310.Py_FileSystemDefaultEncoding DATA - Py_Finalize=python310.Py_Finalize - Py_FinalizeEx=python310.Py_FinalizeEx - Py_GenericAlias=python310.Py_GenericAlias - Py_GenericAliasType=python310.Py_GenericAliasType - Py_GetArgcArgv=python310.Py_GetArgcArgv - Py_GetBuildInfo=python310.Py_GetBuildInfo - Py_GetCompiler=python310.Py_GetCompiler - Py_GetCopyright=python310.Py_GetCopyright - Py_GetExecPrefix=python310.Py_GetExecPrefix - Py_GetPath=python310.Py_GetPath - Py_GetPlatform=python310.Py_GetPlatform - Py_GetPrefix=python310.Py_GetPrefix - Py_GetProgramFullPath=python310.Py_GetProgramFullPath - Py_GetProgramName=python310.Py_GetProgramName - Py_GetPythonHome=python310.Py_GetPythonHome - Py_GetRecursionLimit=python310.Py_GetRecursionLimit - Py_GetVersion=python310.Py_GetVersion - Py_HasFileSystemDefaultEncoding=python310.Py_HasFileSystemDefaultEncoding DATA - Py_IncRef=python310.Py_IncRef - Py_Initialize=python310.Py_Initialize - Py_InitializeEx=python310.Py_InitializeEx - Py_IsInitialized=python310.Py_IsInitialized - Py_LeaveRecursiveCall=python310.Py_LeaveRecursiveCall - Py_Main=python310.Py_Main - Py_MakePendingCalls=python310.Py_MakePendingCalls - Py_NewInterpreter=python310.Py_NewInterpreter - Py_ReprEnter=python310.Py_ReprEnter - Py_ReprLeave=python310.Py_ReprLeave - Py_SetPath=python310.Py_SetPath - Py_SetProgramName=python310.Py_SetProgramName - Py_SetPythonHome=python310.Py_SetPythonHome - Py_SetRecursionLimit=python310.Py_SetRecursionLimit - Py_SymtableString=python310.Py_SymtableString - Py_UTF8Mode=python310.Py_UTF8Mode DATA - Py_VaBuildValue=python310.Py_VaBuildValue - _PyArg_ParseTupleAndKeywords_SizeT=python310._PyArg_ParseTupleAndKeywords_SizeT - _PyArg_ParseTuple_SizeT=python310._PyArg_ParseTuple_SizeT - _PyArg_Parse_SizeT=python310._PyArg_Parse_SizeT - _PyArg_VaParseTupleAndKeywords_SizeT=python310._PyArg_VaParseTupleAndKeywords_SizeT - _PyArg_VaParse_SizeT=python310._PyArg_VaParse_SizeT - _PyErr_BadInternalCall=python310._PyErr_BadInternalCall - _PyObject_CallFunction_SizeT=python310._PyObject_CallFunction_SizeT - _PyObject_CallMethod_SizeT=python310._PyObject_CallMethod_SizeT - _PyObject_GC_Malloc=python310._PyObject_GC_Malloc - _PyObject_GC_New=python310._PyObject_GC_New - _PyObject_GC_NewVar=python310._PyObject_GC_NewVar - _PyObject_GC_Resize=python310._PyObject_GC_Resize - _PyObject_New=python310._PyObject_New - _PyObject_NewVar=python310._PyObject_NewVar - _PyState_AddModule=python310._PyState_AddModule - _PyThreadState_Init=python310._PyThreadState_Init - _PyThreadState_Prealloc=python310._PyThreadState_Prealloc - _PyTrash_delete_later=python310._PyTrash_delete_later DATA - _PyTrash_delete_nesting=python310._PyTrash_delete_nesting DATA - _PyTrash_deposit_object=python310._PyTrash_deposit_object - _PyTrash_destroy_chain=python310._PyTrash_destroy_chain - _PyTrash_thread_deposit_object=python310._PyTrash_thread_deposit_object - _PyTrash_thread_destroy_chain=python310._PyTrash_thread_destroy_chain - 
_PyWeakref_CallableProxyType=python310._PyWeakref_CallableProxyType DATA - _PyWeakref_ProxyType=python310._PyWeakref_ProxyType DATA - _PyWeakref_RefType=python310._PyWeakref_RefType DATA - _Py_BuildValue_SizeT=python310._Py_BuildValue_SizeT - _Py_CheckRecursionLimit=python310._Py_CheckRecursionLimit DATA - _Py_CheckRecursiveCall=python310._Py_CheckRecursiveCall - _Py_Dealloc=python310._Py_Dealloc - _Py_EllipsisObject=python310._Py_EllipsisObject DATA - _Py_FalseStruct=python310._Py_FalseStruct DATA - _Py_NoneStruct=python310._Py_NoneStruct DATA - _Py_NotImplementedStruct=python310._Py_NotImplementedStruct DATA - _Py_SwappedOp=python310._Py_SwappedOp DATA - _Py_TrueStruct=python310._Py_TrueStruct DATA - _Py_VaBuildValue_SizeT=python310._Py_VaBuildValue_SizeT diff --git a/PC/python3dll.c b/PC/python3dll.c index ef2907112564f..2f29e83f612a0 100644 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -1,9 +1,814 @@ -#include - -BOOL WINAPI -DllMain(HINSTANCE hInstDLL, - DWORD fdwReason, - LPVOID lpReserved) -{ - return TRUE; -} \ No newline at end of file +/* Re-export stable Python API */ + +#ifdef _M_IX86 +#define DECORATE "_" +#else +#define DECORATE +#endif + +#define EXPORT_FUNC(name) \ + __pragma(comment(linker, "/EXPORT:" DECORATE #name "=" PYTHON_DLL_NAME "." #name)) +#define EXPORT_DATA(name) \ + __pragma(comment(linker, "/EXPORT:" DECORATE #name "=" PYTHON_DLL_NAME "." #name ",DATA")) + +EXPORT_FUNC(_Py_BuildValue_SizeT) +EXPORT_FUNC(_Py_CheckRecursiveCall) +EXPORT_FUNC(_Py_Dealloc) +EXPORT_FUNC(_Py_VaBuildValue_SizeT) +EXPORT_FUNC(_PyArg_Parse_SizeT) +EXPORT_FUNC(_PyArg_ParseTuple_SizeT) +EXPORT_FUNC(_PyArg_ParseTupleAndKeywords_SizeT) +EXPORT_FUNC(_PyArg_VaParse_SizeT) +EXPORT_FUNC(_PyArg_VaParseTupleAndKeywords_SizeT) +EXPORT_FUNC(_PyErr_BadInternalCall) +EXPORT_FUNC(_PyObject_CallFunction_SizeT) +EXPORT_FUNC(_PyObject_CallMethod_SizeT) +EXPORT_FUNC(_PyObject_GC_Malloc) +EXPORT_FUNC(_PyObject_GC_New) +EXPORT_FUNC(_PyObject_GC_NewVar) +EXPORT_FUNC(_PyObject_GC_Resize) +EXPORT_FUNC(_PyObject_New) +EXPORT_FUNC(_PyObject_NewVar) +EXPORT_FUNC(_PyState_AddModule) +EXPORT_FUNC(_PyThreadState_Init) +EXPORT_FUNC(_PyThreadState_Prealloc) +EXPORT_FUNC(_PyTrash_deposit_object) +EXPORT_FUNC(_PyTrash_destroy_chain) +EXPORT_FUNC(_PyTrash_thread_deposit_object) +EXPORT_FUNC(_PyTrash_thread_destroy_chain) +EXPORT_FUNC(Py_AddPendingCall) +EXPORT_FUNC(Py_AtExit) +EXPORT_FUNC(Py_BuildValue) +EXPORT_FUNC(Py_CompileString) +EXPORT_FUNC(Py_DecodeLocale) +EXPORT_FUNC(Py_DecRef) +EXPORT_FUNC(Py_EncodeLocale) +EXPORT_FUNC(Py_EndInterpreter) +EXPORT_FUNC(Py_EnterRecursiveCall) +EXPORT_FUNC(Py_Exit) +EXPORT_FUNC(Py_FatalError) +EXPORT_FUNC(Py_Finalize) +EXPORT_FUNC(Py_FinalizeEx) +EXPORT_FUNC(Py_GenericAlias) +EXPORT_FUNC(Py_GenericAliasType) +EXPORT_FUNC(Py_GetArgcArgv) +EXPORT_FUNC(Py_GetBuildInfo) +EXPORT_FUNC(Py_GetCompiler) +EXPORT_FUNC(Py_GetCopyright) +EXPORT_FUNC(Py_GetExecPrefix) +EXPORT_FUNC(Py_GetPath) +EXPORT_FUNC(Py_GetPlatform) +EXPORT_FUNC(Py_GetPrefix) +EXPORT_FUNC(Py_GetProgramFullPath) +EXPORT_FUNC(Py_GetProgramName) +EXPORT_FUNC(Py_GetPythonHome) +EXPORT_FUNC(Py_GetRecursionLimit) +EXPORT_FUNC(Py_GetVersion) +EXPORT_FUNC(Py_IncRef) +EXPORT_FUNC(Py_Initialize) +EXPORT_FUNC(Py_InitializeEx) +EXPORT_FUNC(Py_IsInitialized) +EXPORT_FUNC(Py_LeaveRecursiveCall) +EXPORT_FUNC(Py_Main) +EXPORT_FUNC(Py_MakePendingCalls) +EXPORT_FUNC(Py_NewInterpreter) +EXPORT_FUNC(Py_ReprEnter) +EXPORT_FUNC(Py_ReprLeave) +EXPORT_FUNC(Py_SetPath) +EXPORT_FUNC(Py_SetProgramName) +EXPORT_FUNC(Py_SetPythonHome) 
+EXPORT_FUNC(Py_SetRecursionLimit) +EXPORT_FUNC(Py_SymtableString) +EXPORT_FUNC(Py_VaBuildValue) +EXPORT_FUNC(PyArg_Parse) +EXPORT_FUNC(PyArg_ParseTuple) +EXPORT_FUNC(PyArg_ParseTupleAndKeywords) +EXPORT_FUNC(PyArg_UnpackTuple) +EXPORT_FUNC(PyArg_ValidateKeywordArguments) +EXPORT_FUNC(PyArg_VaParse) +EXPORT_FUNC(PyArg_VaParseTupleAndKeywords) +EXPORT_FUNC(PyBool_FromLong) +EXPORT_FUNC(PyByteArray_AsString) +EXPORT_FUNC(PyByteArray_Concat) +EXPORT_FUNC(PyByteArray_FromObject) +EXPORT_FUNC(PyByteArray_FromStringAndSize) +EXPORT_FUNC(PyByteArray_Resize) +EXPORT_FUNC(PyByteArray_Size) +EXPORT_FUNC(PyBytes_AsString) +EXPORT_FUNC(PyBytes_AsStringAndSize) +EXPORT_FUNC(PyBytes_Concat) +EXPORT_FUNC(PyBytes_ConcatAndDel) +EXPORT_FUNC(PyBytes_DecodeEscape) +EXPORT_FUNC(PyBytes_FromFormat) +EXPORT_FUNC(PyBytes_FromFormatV) +EXPORT_FUNC(PyBytes_FromObject) +EXPORT_FUNC(PyBytes_FromString) +EXPORT_FUNC(PyBytes_FromStringAndSize) +EXPORT_FUNC(PyBytes_Repr) +EXPORT_FUNC(PyBytes_Size) +EXPORT_FUNC(PyCallable_Check) +EXPORT_FUNC(PyCallIter_New) +EXPORT_FUNC(PyCapsule_GetContext) +EXPORT_FUNC(PyCapsule_GetDestructor) +EXPORT_FUNC(PyCapsule_GetName) +EXPORT_FUNC(PyCapsule_GetPointer) +EXPORT_FUNC(PyCapsule_Import) +EXPORT_FUNC(PyCapsule_IsValid) +EXPORT_FUNC(PyCapsule_New) +EXPORT_FUNC(PyCapsule_SetContext) +EXPORT_FUNC(PyCapsule_SetDestructor) +EXPORT_FUNC(PyCapsule_SetName) +EXPORT_FUNC(PyCapsule_SetPointer) +EXPORT_FUNC(PyCFunction_Call) +EXPORT_FUNC(PyCFunction_GetFlags) +EXPORT_FUNC(PyCFunction_GetFunction) +EXPORT_FUNC(PyCFunction_GetSelf) +EXPORT_FUNC(PyCFunction_New) +EXPORT_FUNC(PyCFunction_NewEx) +EXPORT_FUNC(PyCodec_BackslashReplaceErrors) +EXPORT_FUNC(PyCodec_Decode) +EXPORT_FUNC(PyCodec_Decoder) +EXPORT_FUNC(PyCodec_Encode) +EXPORT_FUNC(PyCodec_Encoder) +EXPORT_FUNC(PyCodec_IgnoreErrors) +EXPORT_FUNC(PyCodec_IncrementalDecoder) +EXPORT_FUNC(PyCodec_IncrementalEncoder) +EXPORT_FUNC(PyCodec_KnownEncoding) +EXPORT_FUNC(PyCodec_LookupError) +EXPORT_FUNC(PyCodec_NameReplaceErrors) +EXPORT_FUNC(PyCodec_Register) +EXPORT_FUNC(PyCodec_RegisterError) +EXPORT_FUNC(PyCodec_ReplaceErrors) +EXPORT_FUNC(PyCodec_StreamReader) +EXPORT_FUNC(PyCodec_StreamWriter) +EXPORT_FUNC(PyCodec_StrictErrors) +EXPORT_FUNC(PyCodec_XMLCharRefReplaceErrors) +EXPORT_FUNC(PyComplex_FromDoubles) +EXPORT_FUNC(PyComplex_ImagAsDouble) +EXPORT_FUNC(PyComplex_RealAsDouble) +EXPORT_FUNC(PyDescr_NewClassMethod) +EXPORT_FUNC(PyDescr_NewGetSet) +EXPORT_FUNC(PyDescr_NewMember) +EXPORT_FUNC(PyDescr_NewMethod) +EXPORT_FUNC(PyDict_Clear) +EXPORT_FUNC(PyDict_Contains) +EXPORT_FUNC(PyDict_Copy) +EXPORT_FUNC(PyDict_DelItem) +EXPORT_FUNC(PyDict_DelItemString) +EXPORT_FUNC(PyDict_GetItem) +EXPORT_FUNC(PyDict_GetItemString) +EXPORT_FUNC(PyDict_GetItemWithError) +EXPORT_FUNC(PyDict_Items) +EXPORT_FUNC(PyDict_Keys) +EXPORT_FUNC(PyDict_Merge) +EXPORT_FUNC(PyDict_MergeFromSeq2) +EXPORT_FUNC(PyDict_New) +EXPORT_FUNC(PyDict_Next) +EXPORT_FUNC(PyDict_SetItem) +EXPORT_FUNC(PyDict_SetItemString) +EXPORT_FUNC(PyDict_Size) +EXPORT_FUNC(PyDict_Update) +EXPORT_FUNC(PyDict_Values) +EXPORT_FUNC(PyDictProxy_New) +EXPORT_FUNC(PyErr_BadArgument) +EXPORT_FUNC(PyErr_BadInternalCall) +EXPORT_FUNC(PyErr_CheckSignals) +EXPORT_FUNC(PyErr_Clear) +EXPORT_FUNC(PyErr_Display) +EXPORT_FUNC(PyErr_ExceptionMatches) +EXPORT_FUNC(PyErr_Fetch) +EXPORT_FUNC(PyErr_Format) +EXPORT_FUNC(PyErr_FormatV) +EXPORT_FUNC(PyErr_GetExcInfo) +EXPORT_FUNC(PyErr_GivenExceptionMatches) +EXPORT_FUNC(PyErr_NewException) +EXPORT_FUNC(PyErr_NewExceptionWithDoc) +EXPORT_FUNC(PyErr_NoMemory) 
+EXPORT_FUNC(PyErr_NormalizeException) +EXPORT_FUNC(PyErr_Occurred) +EXPORT_FUNC(PyErr_Print) +EXPORT_FUNC(PyErr_PrintEx) +EXPORT_FUNC(PyErr_ProgramText) +EXPORT_FUNC(PyErr_ResourceWarning) +EXPORT_FUNC(PyErr_Restore) +EXPORT_FUNC(PyErr_SetExcFromWindowsErr) +EXPORT_FUNC(PyErr_SetExcFromWindowsErrWithFilename) +EXPORT_FUNC(PyErr_SetExcFromWindowsErrWithFilenameObject) +EXPORT_FUNC(PyErr_SetExcFromWindowsErrWithFilenameObjects) +EXPORT_FUNC(PyErr_SetExcInfo) +EXPORT_FUNC(PyErr_SetFromErrno) +EXPORT_FUNC(PyErr_SetFromErrnoWithFilename) +EXPORT_FUNC(PyErr_SetFromErrnoWithFilenameObject) +EXPORT_FUNC(PyErr_SetFromErrnoWithFilenameObjects) +EXPORT_FUNC(PyErr_SetFromWindowsErr) +EXPORT_FUNC(PyErr_SetFromWindowsErrWithFilename) +EXPORT_FUNC(PyErr_SetImportError) +EXPORT_FUNC(PyErr_SetImportErrorSubclass) +EXPORT_FUNC(PyErr_SetInterrupt) +EXPORT_FUNC(PyErr_SetNone) +EXPORT_FUNC(PyErr_SetObject) +EXPORT_FUNC(PyErr_SetString) +EXPORT_FUNC(PyErr_SyntaxLocation) +EXPORT_FUNC(PyErr_SyntaxLocationEx) +EXPORT_FUNC(PyErr_WarnEx) +EXPORT_FUNC(PyErr_WarnExplicit) +EXPORT_FUNC(PyErr_WarnFormat) +EXPORT_FUNC(PyErr_WriteUnraisable) +EXPORT_FUNC(PyEval_AcquireLock) +EXPORT_FUNC(PyEval_AcquireThread) +EXPORT_FUNC(PyEval_CallFunction) +EXPORT_FUNC(PyEval_CallMethod) +EXPORT_FUNC(PyEval_CallObjectWithKeywords) +EXPORT_FUNC(PyEval_EvalCode) +EXPORT_FUNC(PyEval_EvalCodeEx) +EXPORT_FUNC(PyEval_EvalFrame) +EXPORT_FUNC(PyEval_EvalFrameEx) +EXPORT_FUNC(PyEval_GetBuiltins) +EXPORT_FUNC(PyEval_GetCallStats) +EXPORT_FUNC(PyEval_GetFrame) +EXPORT_FUNC(PyEval_GetFuncDesc) +EXPORT_FUNC(PyEval_GetFuncName) +EXPORT_FUNC(PyEval_GetGlobals) +EXPORT_FUNC(PyEval_GetLocals) +EXPORT_FUNC(PyEval_InitThreads) +EXPORT_FUNC(PyEval_ReInitThreads) +EXPORT_FUNC(PyEval_ReleaseLock) +EXPORT_FUNC(PyEval_ReleaseThread) +EXPORT_FUNC(PyEval_RestoreThread) +EXPORT_FUNC(PyEval_SaveThread) +EXPORT_FUNC(PyEval_ThreadsInitialized) +EXPORT_FUNC(PyException_GetCause) +EXPORT_FUNC(PyException_GetContext) +EXPORT_FUNC(PyException_GetTraceback) +EXPORT_FUNC(PyException_SetCause) +EXPORT_FUNC(PyException_SetContext) +EXPORT_FUNC(PyException_SetTraceback) +EXPORT_FUNC(PyExceptionClass_Name) +EXPORT_FUNC(PyFile_FromFd) +EXPORT_FUNC(PyFile_GetLine) +EXPORT_FUNC(PyFile_WriteObject) +EXPORT_FUNC(PyFile_WriteString) +EXPORT_FUNC(PyFloat_AsDouble) +EXPORT_FUNC(PyFloat_FromDouble) +EXPORT_FUNC(PyFloat_FromString) +EXPORT_FUNC(PyFloat_GetInfo) +EXPORT_FUNC(PyFloat_GetMax) +EXPORT_FUNC(PyFloat_GetMin) +EXPORT_FUNC(PyFrozenSet_New) +EXPORT_FUNC(PyGC_Collect) +EXPORT_FUNC(PyGILState_Ensure) +EXPORT_FUNC(PyGILState_GetThisThreadState) +EXPORT_FUNC(PyGILState_Release) +EXPORT_FUNC(PyImport_AddModule) +EXPORT_FUNC(PyImport_AddModuleObject) +EXPORT_FUNC(PyImport_AppendInittab) +EXPORT_FUNC(PyImport_Cleanup) +EXPORT_FUNC(PyImport_ExecCodeModule) +EXPORT_FUNC(PyImport_ExecCodeModuleEx) +EXPORT_FUNC(PyImport_ExecCodeModuleObject) +EXPORT_FUNC(PyImport_ExecCodeModuleWithPathnames) +EXPORT_FUNC(PyImport_GetImporter) +EXPORT_FUNC(PyImport_GetMagicNumber) +EXPORT_FUNC(PyImport_GetMagicTag) +EXPORT_FUNC(PyImport_GetModule) +EXPORT_FUNC(PyImport_GetModuleDict) +EXPORT_FUNC(PyImport_Import) +EXPORT_FUNC(PyImport_ImportFrozenModule) +EXPORT_FUNC(PyImport_ImportFrozenModuleObject) +EXPORT_FUNC(PyImport_ImportModule) +EXPORT_FUNC(PyImport_ImportModuleLevel) +EXPORT_FUNC(PyImport_ImportModuleLevelObject) +EXPORT_FUNC(PyImport_ImportModuleNoBlock) +EXPORT_FUNC(PyImport_ReloadModule) +EXPORT_FUNC(PyIndex_Check) +EXPORT_FUNC(PyInterpreterState_Clear) +EXPORT_FUNC(PyInterpreterState_Delete) 
+EXPORT_FUNC(PyInterpreterState_GetID) +EXPORT_FUNC(PyInterpreterState_New) +EXPORT_FUNC(PyIter_Check) +EXPORT_FUNC(PyIter_Next) +EXPORT_FUNC(PyList_Append) +EXPORT_FUNC(PyList_AsTuple) +EXPORT_FUNC(PyList_GetItem) +EXPORT_FUNC(PyList_GetSlice) +EXPORT_FUNC(PyList_Insert) +EXPORT_FUNC(PyList_New) +EXPORT_FUNC(PyList_Reverse) +EXPORT_FUNC(PyList_SetItem) +EXPORT_FUNC(PyList_SetSlice) +EXPORT_FUNC(PyList_Size) +EXPORT_FUNC(PyList_Sort) +EXPORT_FUNC(PyLong_AsDouble) +EXPORT_FUNC(PyLong_AsLong) +EXPORT_FUNC(PyLong_AsLongAndOverflow) +EXPORT_FUNC(PyLong_AsLongLong) +EXPORT_FUNC(PyLong_AsLongLongAndOverflow) +EXPORT_FUNC(PyLong_AsSize_t) +EXPORT_FUNC(PyLong_AsSsize_t) +EXPORT_FUNC(PyLong_AsUnsignedLong) +EXPORT_FUNC(PyLong_AsUnsignedLongLong) +EXPORT_FUNC(PyLong_AsUnsignedLongLongMask) +EXPORT_FUNC(PyLong_AsUnsignedLongMask) +EXPORT_FUNC(PyLong_AsVoidPtr) +EXPORT_FUNC(PyLong_FromDouble) +EXPORT_FUNC(PyLong_FromLong) +EXPORT_FUNC(PyLong_FromLongLong) +EXPORT_FUNC(PyLong_FromSize_t) +EXPORT_FUNC(PyLong_FromSsize_t) +EXPORT_FUNC(PyLong_FromString) +EXPORT_FUNC(PyLong_FromUnsignedLong) +EXPORT_FUNC(PyLong_FromUnsignedLongLong) +EXPORT_FUNC(PyLong_FromVoidPtr) +EXPORT_FUNC(PyLong_GetInfo) +EXPORT_FUNC(PyMapping_Check) +EXPORT_FUNC(PyMapping_GetItemString) +EXPORT_FUNC(PyMapping_HasKey) +EXPORT_FUNC(PyMapping_HasKeyString) +EXPORT_FUNC(PyMapping_Items) +EXPORT_FUNC(PyMapping_Keys) +EXPORT_FUNC(PyMapping_Length) +EXPORT_FUNC(PyMapping_SetItemString) +EXPORT_FUNC(PyMapping_Size) +EXPORT_FUNC(PyMapping_Values) +EXPORT_FUNC(PyMem_Calloc) +EXPORT_FUNC(PyMem_Free) +EXPORT_FUNC(PyMem_Malloc) +EXPORT_FUNC(PyMem_Realloc) +EXPORT_FUNC(PyMemoryView_FromMemory) +EXPORT_FUNC(PyMemoryView_FromObject) +EXPORT_FUNC(PyMemoryView_GetContiguous) +EXPORT_FUNC(PyModule_AddFunctions) +EXPORT_FUNC(PyModule_AddIntConstant) +EXPORT_FUNC(PyModule_AddObject) +EXPORT_FUNC(PyModule_AddStringConstant) +EXPORT_FUNC(PyModule_Create2) +EXPORT_FUNC(PyModule_ExecDef) +EXPORT_FUNC(PyModule_FromDefAndSpec2) +EXPORT_FUNC(PyModule_GetDef) +EXPORT_FUNC(PyModule_GetDict) +EXPORT_FUNC(PyModule_GetFilename) +EXPORT_FUNC(PyModule_GetFilenameObject) +EXPORT_FUNC(PyModule_GetName) +EXPORT_FUNC(PyModule_GetNameObject) +EXPORT_FUNC(PyModule_GetState) +EXPORT_FUNC(PyModule_New) +EXPORT_FUNC(PyModule_NewObject) +EXPORT_FUNC(PyModule_SetDocString) +EXPORT_FUNC(PyModuleDef_Init) +EXPORT_FUNC(PyNumber_Absolute) +EXPORT_FUNC(PyNumber_Add) +EXPORT_FUNC(PyNumber_And) +EXPORT_FUNC(PyNumber_AsSsize_t) +EXPORT_FUNC(PyNumber_Check) +EXPORT_FUNC(PyNumber_Divmod) +EXPORT_FUNC(PyNumber_Float) +EXPORT_FUNC(PyNumber_FloorDivide) +EXPORT_FUNC(PyNumber_Index) +EXPORT_FUNC(PyNumber_InPlaceAdd) +EXPORT_FUNC(PyNumber_InPlaceAnd) +EXPORT_FUNC(PyNumber_InPlaceFloorDivide) +EXPORT_FUNC(PyNumber_InPlaceLshift) +EXPORT_FUNC(PyNumber_InPlaceMatrixMultiply) +EXPORT_FUNC(PyNumber_InPlaceMultiply) +EXPORT_FUNC(PyNumber_InPlaceOr) +EXPORT_FUNC(PyNumber_InPlacePower) +EXPORT_FUNC(PyNumber_InPlaceRemainder) +EXPORT_FUNC(PyNumber_InPlaceRshift) +EXPORT_FUNC(PyNumber_InPlaceSubtract) +EXPORT_FUNC(PyNumber_InPlaceTrueDivide) +EXPORT_FUNC(PyNumber_InPlaceXor) +EXPORT_FUNC(PyNumber_Invert) +EXPORT_FUNC(PyNumber_Long) +EXPORT_FUNC(PyNumber_Lshift) +EXPORT_FUNC(PyNumber_MatrixMultiply) +EXPORT_FUNC(PyNumber_Multiply) +EXPORT_FUNC(PyNumber_Negative) +EXPORT_FUNC(PyNumber_Or) +EXPORT_FUNC(PyNumber_Positive) +EXPORT_FUNC(PyNumber_Power) +EXPORT_FUNC(PyNumber_Remainder) +EXPORT_FUNC(PyNumber_Rshift) +EXPORT_FUNC(PyNumber_Subtract) +EXPORT_FUNC(PyNumber_ToBase) 
+EXPORT_FUNC(PyNumber_TrueDivide) +EXPORT_FUNC(PyNumber_Xor) +EXPORT_FUNC(PyObject_AsCharBuffer) +EXPORT_FUNC(PyObject_ASCII) +EXPORT_FUNC(PyObject_AsFileDescriptor) +EXPORT_FUNC(PyObject_AsReadBuffer) +EXPORT_FUNC(PyObject_AsWriteBuffer) +EXPORT_FUNC(PyObject_Bytes) +EXPORT_FUNC(PyObject_Call) +EXPORT_FUNC(PyObject_CallFunction) +EXPORT_FUNC(PyObject_CallFunctionObjArgs) +EXPORT_FUNC(PyObject_CallMethod) +EXPORT_FUNC(PyObject_CallMethodObjArgs) +EXPORT_FUNC(PyObject_CallObject) +EXPORT_FUNC(PyObject_Calloc) +EXPORT_FUNC(PyObject_CheckReadBuffer) +EXPORT_FUNC(PyObject_ClearWeakRefs) +EXPORT_FUNC(PyObject_DelItem) +EXPORT_FUNC(PyObject_DelItemString) +EXPORT_FUNC(PyObject_Dir) +EXPORT_FUNC(PyObject_Format) +EXPORT_FUNC(PyObject_Free) +EXPORT_FUNC(PyObject_GC_Del) +EXPORT_FUNC(PyObject_GC_Track) +EXPORT_FUNC(PyObject_GC_UnTrack) +EXPORT_FUNC(PyObject_GenericGetAttr) +EXPORT_FUNC(PyObject_GenericSetAttr) +EXPORT_FUNC(PyObject_GenericSetDict) +EXPORT_FUNC(PyObject_GetAttr) +EXPORT_FUNC(PyObject_GetAttrString) +EXPORT_FUNC(PyObject_GetItem) +EXPORT_FUNC(PyObject_GetIter) +EXPORT_FUNC(PyObject_HasAttr) +EXPORT_FUNC(PyObject_HasAttrString) +EXPORT_FUNC(PyObject_Hash) +EXPORT_FUNC(PyObject_HashNotImplemented) +EXPORT_FUNC(PyObject_Init) +EXPORT_FUNC(PyObject_InitVar) +EXPORT_FUNC(PyObject_IsInstance) +EXPORT_FUNC(PyObject_IsSubclass) +EXPORT_FUNC(PyObject_IsTrue) +EXPORT_FUNC(PyObject_Length) +EXPORT_FUNC(PyObject_Malloc) +EXPORT_FUNC(PyObject_Not) +EXPORT_FUNC(PyObject_Realloc) +EXPORT_FUNC(PyObject_Repr) +EXPORT_FUNC(PyObject_RichCompare) +EXPORT_FUNC(PyObject_RichCompareBool) +EXPORT_FUNC(PyObject_SelfIter) +EXPORT_FUNC(PyObject_SetAttr) +EXPORT_FUNC(PyObject_SetAttrString) +EXPORT_FUNC(PyObject_SetItem) +EXPORT_FUNC(PyObject_Size) +EXPORT_FUNC(PyObject_Str) +EXPORT_FUNC(PyObject_Type) +EXPORT_FUNC(PyODict_DelItem) +EXPORT_FUNC(PyODict_New) +EXPORT_FUNC(PyODict_SetItem) +EXPORT_FUNC(PyOS_AfterFork) +EXPORT_FUNC(PyOS_CheckStack) +EXPORT_FUNC(PyOS_double_to_string) +EXPORT_FUNC(PyOS_FSPath) +EXPORT_FUNC(PyOS_getsig) +EXPORT_FUNC(PyOS_InitInterrupts) +EXPORT_FUNC(PyOS_InterruptOccurred) +EXPORT_FUNC(PyOS_mystricmp) +EXPORT_FUNC(PyOS_mystrnicmp) +EXPORT_FUNC(PyOS_setsig) +EXPORT_FUNC(PyOS_snprintf) +EXPORT_FUNC(PyOS_string_to_double) +EXPORT_FUNC(PyOS_strtol) +EXPORT_FUNC(PyOS_strtoul) +EXPORT_FUNC(PyOS_vsnprintf) +EXPORT_FUNC(PyParser_SimpleParseFileFlags) +EXPORT_FUNC(PyParser_SimpleParseStringFlags) +EXPORT_FUNC(PyParser_SimpleParseStringFlagsFilename) +EXPORT_FUNC(PySeqIter_New) +EXPORT_FUNC(PySequence_Check) +EXPORT_FUNC(PySequence_Concat) +EXPORT_FUNC(PySequence_Contains) +EXPORT_FUNC(PySequence_Count) +EXPORT_FUNC(PySequence_DelItem) +EXPORT_FUNC(PySequence_DelSlice) +EXPORT_FUNC(PySequence_Fast) +EXPORT_FUNC(PySequence_GetItem) +EXPORT_FUNC(PySequence_GetSlice) +EXPORT_FUNC(PySequence_In) +EXPORT_FUNC(PySequence_Index) +EXPORT_FUNC(PySequence_InPlaceConcat) +EXPORT_FUNC(PySequence_InPlaceRepeat) +EXPORT_FUNC(PySequence_Length) +EXPORT_FUNC(PySequence_List) +EXPORT_FUNC(PySequence_Repeat) +EXPORT_FUNC(PySequence_SetItem) +EXPORT_FUNC(PySequence_SetSlice) +EXPORT_FUNC(PySequence_Size) +EXPORT_FUNC(PySequence_Tuple) +EXPORT_FUNC(PySet_Add) +EXPORT_FUNC(PySet_Clear) +EXPORT_FUNC(PySet_Contains) +EXPORT_FUNC(PySet_Discard) +EXPORT_FUNC(PySet_New) +EXPORT_FUNC(PySet_Pop) +EXPORT_FUNC(PySet_Size) +EXPORT_FUNC(PySlice_AdjustIndices) +EXPORT_FUNC(PySlice_GetIndices) +EXPORT_FUNC(PySlice_GetIndicesEx) +EXPORT_FUNC(PySlice_New) +EXPORT_FUNC(PySlice_Unpack) +EXPORT_FUNC(PyState_AddModule) 
+EXPORT_FUNC(PyState_FindModule) +EXPORT_FUNC(PyState_RemoveModule) +EXPORT_FUNC(PyStructSequence_GetItem) +EXPORT_FUNC(PyStructSequence_New) +EXPORT_FUNC(PyStructSequence_NewType) +EXPORT_FUNC(PyStructSequence_SetItem) +EXPORT_FUNC(PySys_AddWarnOption) +EXPORT_FUNC(PySys_AddWarnOptionUnicode) +EXPORT_FUNC(PySys_AddXOption) +EXPORT_FUNC(PySys_FormatStderr) +EXPORT_FUNC(PySys_FormatStdout) +EXPORT_FUNC(PySys_GetObject) +EXPORT_FUNC(PySys_GetXOptions) +EXPORT_FUNC(PySys_HasWarnOptions) +EXPORT_FUNC(PySys_ResetWarnOptions) +EXPORT_FUNC(PySys_SetArgv) +EXPORT_FUNC(PySys_SetArgvEx) +EXPORT_FUNC(PySys_SetObject) +EXPORT_FUNC(PySys_SetPath) +EXPORT_FUNC(PySys_WriteStderr) +EXPORT_FUNC(PySys_WriteStdout) +EXPORT_FUNC(PyThread_tss_alloc) +EXPORT_FUNC(PyThread_tss_create) +EXPORT_FUNC(PyThread_tss_delete) +EXPORT_FUNC(PyThread_tss_free) +EXPORT_FUNC(PyThread_tss_get) +EXPORT_FUNC(PyThread_tss_is_created) +EXPORT_FUNC(PyThread_tss_set) +EXPORT_FUNC(PyThreadState_Clear) +EXPORT_FUNC(PyThreadState_Delete) +EXPORT_FUNC(PyThreadState_DeleteCurrent) +EXPORT_FUNC(PyThreadState_Get) +EXPORT_FUNC(PyThreadState_GetDict) +EXPORT_FUNC(PyThreadState_New) +EXPORT_FUNC(PyThreadState_SetAsyncExc) +EXPORT_FUNC(PyThreadState_Swap) +EXPORT_FUNC(PyTraceBack_Here) +EXPORT_FUNC(PyTraceBack_Print) +EXPORT_FUNC(PyTuple_GetItem) +EXPORT_FUNC(PyTuple_GetSlice) +EXPORT_FUNC(PyTuple_New) +EXPORT_FUNC(PyTuple_Pack) +EXPORT_FUNC(PyTuple_SetItem) +EXPORT_FUNC(PyTuple_Size) +EXPORT_FUNC(PyType_ClearCache) +EXPORT_FUNC(PyType_FromSpec) +EXPORT_FUNC(PyType_FromSpecWithBases) +EXPORT_FUNC(PyType_GenericAlloc) +EXPORT_FUNC(PyType_GenericNew) +EXPORT_FUNC(PyType_GetFlags) +EXPORT_FUNC(PyType_GetSlot) +EXPORT_FUNC(PyType_IsSubtype) +EXPORT_FUNC(PyType_Modified) +EXPORT_FUNC(PyType_Ready) +EXPORT_FUNC(PyUnicode_Append) +EXPORT_FUNC(PyUnicode_AppendAndDel) +EXPORT_FUNC(PyUnicode_AsASCIIString) +EXPORT_FUNC(PyUnicode_AsCharmapString) +EXPORT_FUNC(PyUnicode_AsDecodedObject) +EXPORT_FUNC(PyUnicode_AsDecodedUnicode) +EXPORT_FUNC(PyUnicode_AsEncodedObject) +EXPORT_FUNC(PyUnicode_AsEncodedString) +EXPORT_FUNC(PyUnicode_AsEncodedUnicode) +EXPORT_FUNC(PyUnicode_AsLatin1String) +EXPORT_FUNC(PyUnicode_AsMBCSString) +EXPORT_FUNC(PyUnicode_AsRawUnicodeEscapeString) +EXPORT_FUNC(PyUnicode_AsUCS4) +EXPORT_FUNC(PyUnicode_AsUCS4Copy) +EXPORT_FUNC(PyUnicode_AsUnicodeEscapeString) +EXPORT_FUNC(PyUnicode_AsUTF16String) +EXPORT_FUNC(PyUnicode_AsUTF32String) +EXPORT_FUNC(PyUnicode_AsUTF8String) +EXPORT_FUNC(PyUnicode_AsWideChar) +EXPORT_FUNC(PyUnicode_AsWideCharString) +EXPORT_FUNC(PyUnicode_BuildEncodingMap) +EXPORT_FUNC(PyUnicode_Compare) +EXPORT_FUNC(PyUnicode_CompareWithASCIIString) +EXPORT_FUNC(PyUnicode_Concat) +EXPORT_FUNC(PyUnicode_Contains) +EXPORT_FUNC(PyUnicode_Count) +EXPORT_FUNC(PyUnicode_Decode) +EXPORT_FUNC(PyUnicode_DecodeASCII) +EXPORT_FUNC(PyUnicode_DecodeCharmap) +EXPORT_FUNC(PyUnicode_DecodeCodePageStateful) +EXPORT_FUNC(PyUnicode_DecodeFSDefault) +EXPORT_FUNC(PyUnicode_DecodeFSDefaultAndSize) +EXPORT_FUNC(PyUnicode_DecodeLatin1) +EXPORT_FUNC(PyUnicode_DecodeLocale) +EXPORT_FUNC(PyUnicode_DecodeLocaleAndSize) +EXPORT_FUNC(PyUnicode_DecodeMBCS) +EXPORT_FUNC(PyUnicode_DecodeMBCSStateful) +EXPORT_FUNC(PyUnicode_DecodeRawUnicodeEscape) +EXPORT_FUNC(PyUnicode_DecodeUnicodeEscape) +EXPORT_FUNC(PyUnicode_DecodeUTF16) +EXPORT_FUNC(PyUnicode_DecodeUTF16Stateful) +EXPORT_FUNC(PyUnicode_DecodeUTF32) +EXPORT_FUNC(PyUnicode_DecodeUTF32Stateful) +EXPORT_FUNC(PyUnicode_DecodeUTF7) +EXPORT_FUNC(PyUnicode_DecodeUTF7Stateful) 
+EXPORT_FUNC(PyUnicode_DecodeUTF8) +EXPORT_FUNC(PyUnicode_DecodeUTF8Stateful) +EXPORT_FUNC(PyUnicode_EncodeCodePage) +EXPORT_FUNC(PyUnicode_EncodeFSDefault) +EXPORT_FUNC(PyUnicode_EncodeLocale) +EXPORT_FUNC(PyUnicode_Find) +EXPORT_FUNC(PyUnicode_FindChar) +EXPORT_FUNC(PyUnicode_Format) +EXPORT_FUNC(PyUnicode_FromEncodedObject) +EXPORT_FUNC(PyUnicode_FromFormat) +EXPORT_FUNC(PyUnicode_FromFormatV) +EXPORT_FUNC(PyUnicode_FromObject) +EXPORT_FUNC(PyUnicode_FromOrdinal) +EXPORT_FUNC(PyUnicode_FromString) +EXPORT_FUNC(PyUnicode_FromStringAndSize) +EXPORT_FUNC(PyUnicode_FromWideChar) +EXPORT_FUNC(PyUnicode_FSConverter) +EXPORT_FUNC(PyUnicode_FSDecoder) +EXPORT_FUNC(PyUnicode_GetDefaultEncoding) +EXPORT_FUNC(PyUnicode_GetLength) +EXPORT_FUNC(PyUnicode_GetSize) +EXPORT_FUNC(PyUnicode_InternFromString) +EXPORT_FUNC(PyUnicode_InternImmortal) +EXPORT_FUNC(PyUnicode_InternInPlace) +EXPORT_FUNC(PyUnicode_IsIdentifier) +EXPORT_FUNC(PyUnicode_Join) +EXPORT_FUNC(PyUnicode_Partition) +EXPORT_FUNC(PyUnicode_ReadChar) +EXPORT_FUNC(PyUnicode_Replace) +EXPORT_FUNC(PyUnicode_Resize) +EXPORT_FUNC(PyUnicode_RichCompare) +EXPORT_FUNC(PyUnicode_RPartition) +EXPORT_FUNC(PyUnicode_RSplit) +EXPORT_FUNC(PyUnicode_Split) +EXPORT_FUNC(PyUnicode_Splitlines) +EXPORT_FUNC(PyUnicode_Substring) +EXPORT_FUNC(PyUnicode_Tailmatch) +EXPORT_FUNC(PyUnicode_Translate) +EXPORT_FUNC(PyUnicode_WriteChar) +EXPORT_FUNC(PyUnicodeDecodeError_Create) +EXPORT_FUNC(PyUnicodeDecodeError_GetEncoding) +EXPORT_FUNC(PyUnicodeDecodeError_GetEnd) +EXPORT_FUNC(PyUnicodeDecodeError_GetObject) +EXPORT_FUNC(PyUnicodeDecodeError_GetReason) +EXPORT_FUNC(PyUnicodeDecodeError_GetStart) +EXPORT_FUNC(PyUnicodeDecodeError_SetEnd) +EXPORT_FUNC(PyUnicodeDecodeError_SetReason) +EXPORT_FUNC(PyUnicodeDecodeError_SetStart) +EXPORT_FUNC(PyUnicodeEncodeError_GetEncoding) +EXPORT_FUNC(PyUnicodeEncodeError_GetEnd) +EXPORT_FUNC(PyUnicodeEncodeError_GetObject) +EXPORT_FUNC(PyUnicodeEncodeError_GetReason) +EXPORT_FUNC(PyUnicodeEncodeError_GetStart) +EXPORT_FUNC(PyUnicodeEncodeError_SetEnd) +EXPORT_FUNC(PyUnicodeEncodeError_SetReason) +EXPORT_FUNC(PyUnicodeEncodeError_SetStart) +EXPORT_FUNC(PyUnicodeTranslateError_GetEnd) +EXPORT_FUNC(PyUnicodeTranslateError_GetObject) +EXPORT_FUNC(PyUnicodeTranslateError_GetReason) +EXPORT_FUNC(PyUnicodeTranslateError_GetStart) +EXPORT_FUNC(PyUnicodeTranslateError_SetEnd) +EXPORT_FUNC(PyUnicodeTranslateError_SetReason) +EXPORT_FUNC(PyUnicodeTranslateError_SetStart) +EXPORT_FUNC(PyWeakref_GetObject) +EXPORT_FUNC(PyWeakref_NewProxy) +EXPORT_FUNC(PyWeakref_NewRef) +EXPORT_FUNC(PyWrapper_New) + +EXPORT_DATA(_Py_CheckRecursionLimit) +EXPORT_DATA(_Py_EllipsisObject) +EXPORT_DATA(_Py_FalseStruct) +EXPORT_DATA(_Py_NoneStruct) +EXPORT_DATA(_Py_NotImplementedStruct) +EXPORT_DATA(_Py_SwappedOp) +EXPORT_DATA(_Py_TrueStruct) +EXPORT_DATA(_PyTrash_delete_later) +EXPORT_DATA(_PyTrash_delete_nesting) +EXPORT_DATA(_PyWeakref_CallableProxyType) +EXPORT_DATA(_PyWeakref_ProxyType) +EXPORT_DATA(_PyWeakref_RefType) +EXPORT_DATA(Py_FileSystemDefaultEncodeErrors) +EXPORT_DATA(Py_FileSystemDefaultEncoding) +EXPORT_DATA(Py_HasFileSystemDefaultEncoding) +EXPORT_DATA(Py_UTF8Mode) +EXPORT_DATA(PyBaseObject_Type) +EXPORT_DATA(PyBool_Type) +EXPORT_DATA(PyByteArray_Type) +EXPORT_DATA(PyByteArrayIter_Type) +EXPORT_DATA(PyBytes_Type) +EXPORT_DATA(PyBytesIter_Type) +EXPORT_DATA(PyCallIter_Type) +EXPORT_DATA(PyCapsule_Type) +EXPORT_DATA(PyCFunction_Type) +EXPORT_DATA(PyClassMethodDescr_Type) +EXPORT_DATA(PyComplex_Type) +EXPORT_DATA(PyDict_Type) 
+EXPORT_DATA(PyDictItems_Type) +EXPORT_DATA(PyDictIterItem_Type) +EXPORT_DATA(PyDictIterKey_Type) +EXPORT_DATA(PyDictIterValue_Type) +EXPORT_DATA(PyDictKeys_Type) +EXPORT_DATA(PyDictProxy_Type) +EXPORT_DATA(PyDictValues_Type) +EXPORT_DATA(PyEllipsis_Type) +EXPORT_DATA(PyEnum_Type) +EXPORT_DATA(PyExc_ArithmeticError) +EXPORT_DATA(PyExc_AssertionError) +EXPORT_DATA(PyExc_AttributeError) +EXPORT_DATA(PyExc_BaseException) +EXPORT_DATA(PyExc_BlockingIOError) +EXPORT_DATA(PyExc_BrokenPipeError) +EXPORT_DATA(PyExc_BufferError) +EXPORT_DATA(PyExc_BytesWarning) +EXPORT_DATA(PyExc_ChildProcessError) +EXPORT_DATA(PyExc_ConnectionAbortedError) +EXPORT_DATA(PyExc_ConnectionError) +EXPORT_DATA(PyExc_ConnectionRefusedError) +EXPORT_DATA(PyExc_ConnectionResetError) +EXPORT_DATA(PyExc_DeprecationWarning) +EXPORT_DATA(PyExc_EnvironmentError) +EXPORT_DATA(PyExc_EOFError) +EXPORT_DATA(PyExc_Exception) +EXPORT_DATA(PyExc_FileExistsError) +EXPORT_DATA(PyExc_FileNotFoundError) +EXPORT_DATA(PyExc_FloatingPointError) +EXPORT_DATA(PyExc_FutureWarning) +EXPORT_DATA(PyExc_GeneratorExit) +EXPORT_DATA(PyExc_ImportError) +EXPORT_DATA(PyExc_ImportWarning) +EXPORT_DATA(PyExc_IndentationError) +EXPORT_DATA(PyExc_IndexError) +EXPORT_DATA(PyExc_InterruptedError) +EXPORT_DATA(PyExc_IOError) +EXPORT_DATA(PyExc_IsADirectoryError) +EXPORT_DATA(PyExc_KeyboardInterrupt) +EXPORT_DATA(PyExc_KeyError) +EXPORT_DATA(PyExc_LookupError) +EXPORT_DATA(PyExc_MemoryError) +EXPORT_DATA(PyExc_ModuleNotFoundError) +EXPORT_DATA(PyExc_NameError) +EXPORT_DATA(PyExc_NotADirectoryError) +EXPORT_DATA(PyExc_NotImplementedError) +EXPORT_DATA(PyExc_OSError) +EXPORT_DATA(PyExc_OverflowError) +EXPORT_DATA(PyExc_PendingDeprecationWarning) +EXPORT_DATA(PyExc_PermissionError) +EXPORT_DATA(PyExc_ProcessLookupError) +EXPORT_DATA(PyExc_RecursionError) +EXPORT_DATA(PyExc_ReferenceError) +EXPORT_DATA(PyExc_ResourceWarning) +EXPORT_DATA(PyExc_RuntimeError) +EXPORT_DATA(PyExc_RuntimeWarning) +EXPORT_DATA(PyExc_StopAsyncIteration) +EXPORT_DATA(PyExc_StopIteration) +EXPORT_DATA(PyExc_SyntaxError) +EXPORT_DATA(PyExc_SyntaxWarning) +EXPORT_DATA(PyExc_SystemError) +EXPORT_DATA(PyExc_SystemExit) +EXPORT_DATA(PyExc_TabError) +EXPORT_DATA(PyExc_TimeoutError) +EXPORT_DATA(PyExc_TypeError) +EXPORT_DATA(PyExc_UnboundLocalError) +EXPORT_DATA(PyExc_UnicodeDecodeError) +EXPORT_DATA(PyExc_UnicodeEncodeError) +EXPORT_DATA(PyExc_UnicodeError) +EXPORT_DATA(PyExc_UnicodeTranslateError) +EXPORT_DATA(PyExc_UnicodeWarning) +EXPORT_DATA(PyExc_UserWarning) +EXPORT_DATA(PyExc_ValueError) +EXPORT_DATA(PyExc_Warning) +EXPORT_DATA(PyExc_WindowsError) +EXPORT_DATA(PyExc_ZeroDivisionError) +EXPORT_DATA(PyFilter_Type) +EXPORT_DATA(PyFloat_Type) +EXPORT_DATA(PyFrozenSet_Type) +EXPORT_DATA(PyGetSetDescr_Type) +EXPORT_DATA(PyList_Type) +EXPORT_DATA(PyListIter_Type) +EXPORT_DATA(PyListRevIter_Type) +EXPORT_DATA(PyLong_Type) +EXPORT_DATA(PyLongRangeIter_Type) +EXPORT_DATA(PyMap_Type) +EXPORT_DATA(PyMemberDescr_Type) +EXPORT_DATA(PyMemoryView_Type) +EXPORT_DATA(PyMethodDescr_Type) +EXPORT_DATA(PyModule_Type) +EXPORT_DATA(PyModuleDef_Type) +EXPORT_DATA(PyNullImporter_Type) +EXPORT_DATA(PyODict_Type) +EXPORT_DATA(PyODictItems_Type) +EXPORT_DATA(PyODictIter_Type) +EXPORT_DATA(PyODictKeys_Type) +EXPORT_DATA(PyODictValues_Type) +EXPORT_DATA(PyOS_InputHook) +EXPORT_DATA(PyOS_ReadlineFunctionPointer) +EXPORT_DATA(PyProperty_Type) +EXPORT_DATA(PyRange_Type) +EXPORT_DATA(PyRangeIter_Type) +EXPORT_DATA(PyReversed_Type) +EXPORT_DATA(PySeqIter_Type) +EXPORT_DATA(PySet_Type) +EXPORT_DATA(PySetIter_Type) 
+EXPORT_DATA(PySlice_Type) +EXPORT_DATA(PySortWrapper_Type) +EXPORT_DATA(PySuper_Type) +EXPORT_DATA(PyTraceBack_Type) +EXPORT_DATA(PyTuple_Type) +EXPORT_DATA(PyTupleIter_Type) +EXPORT_DATA(PyType_Type) +EXPORT_DATA(PyUnicode_Type) +EXPORT_DATA(PyUnicodeIter_Type) +EXPORT_DATA(PyWrapperDescr_Type) +EXPORT_DATA(PyZip_Type) diff --git a/PCbuild/python3dll.vcxproj b/PCbuild/python3dll.vcxproj index ef344bed49e9a..ec22e6fc76e58 100644 --- a/PCbuild/python3dll.vcxproj +++ b/PCbuild/python3dll.vcxproj @@ -88,31 +88,16 @@ <_ProjectFileVersion>10.0.30319.1 - <_Machine>X86 - <_Machine Condition="$(Platform) == 'x64'">X64 - <_Machine Condition="$(Platform) == 'ARM'">ARM - <_Machine Condition="$(Platform) == 'ARM64'">ARM64 - $(ExtensionsToDeleteOnClean);$(IntDir)python3_d.def;$(IntDir)python3stub.def + PYTHON_DLL_NAME="$(PyDllName)";%(PreprocessorDefinitions) false - $(OutDir)$(TargetName)stub.lib - $(PySourcePath)PC\python3.def - $(IntDir)python3_d.def - DllMain + true - - lib /nologo /def:"$(IntDir)python3stub.def" /out:"$(OutDir)$(TargetName)stub.lib" /MACHINE:$(_Machine) - Rebuilding $(TargetName)stub.lib - $(OutDir)$(TargetName)stub.lib - - - - @@ -122,62 +107,4 @@ - - - - <_DefLines Remove="@(_DefLines)" /> - <_Lines Remove="@(_Lines)" /> - <_OriginalLines Remove="@(_OriginalLines)" /> - - - - - - - - - <_Pattern1>(=python$(MajorVersionNumber)$(MinorVersionNumber))\. - <_Sub1>$1_d. - <_Pattern2>"python3" - <_Sub2>"python3_d" - - - <_Lines Include="@(_DefLines)"> - $([System.Text.RegularExpressions.Regex]::Replace($([System.Text.RegularExpressions.Regex]::Replace(`%(Identity)`, `$(_Pattern1)`, `$(_Sub1)`)), `$(_Pattern2)`, `$(_Sub2)`)) - - - - - - - - - - <_DefLines Remove="@(_DefLines)" /> - <_Lines Remove="@(_Lines)" /> - <_OriginalLines Remove="@(_OriginalLines)" /> - - - - - - - - - <_Pattern>^[\w.]+=.+?\.([^ ]+).*$ - <_Sub>$1 - - - <_Lines Include="EXPORTS" /> - <_Symbols Include="@(_DefLines)" Condition="$([System.Text.RegularExpressions.Regex]::IsMatch(`%(Identity)`, `$(_Pattern)`))"> - $([System.Text.RegularExpressions.Regex]::Replace(`%(Identity)`, `$(_Pattern)`, `$(_Sub)`)) - - <_Lines Include="@(_Symbols->'%(Symbol)')" /> - - - - - \ No newline at end of file diff --git a/PCbuild/python3dll.vcxproj.filters b/PCbuild/python3dll.vcxproj.filters index a7566052e1204..ba562dfae02a7 100644 --- a/PCbuild/python3dll.vcxproj.filters +++ b/PCbuild/python3dll.vcxproj.filters @@ -10,11 +10,6 @@ rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav - - - Source Files - - Source Files From webhook-mailer at python.org Tue Jun 23 16:55:54 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 20:55:54 -0000 Subject: [Python-checkins] bpo-40521: Make MemoryError free list per interpreter (GH-21086) Message-ID: https://github.com/python/cpython/commit/281cce1106568ef9fec17e3c72d289416fac02a5 commit: 281cce1106568ef9fec17e3c72d289416fac02a5 branch: master author: Victor Stinner committer: GitHub date: 2020-06-23T22:55:46+02:00 summary: bpo-40521: Make MemoryError free list per interpreter (GH-21086) Each interpreter now has its own MemoryError free list: it is not longer shared by all interpreters. Add _Py_exc_state structure and PyInterpreterState.exc_state member. Move also errnomap into _Py_exc_state. 
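For illustration, a minimal self-contained C sketch of the per-interpreter free-list pattern this commit applies to MemoryError instances. It is not taken from the commit; the names (interp_state, obj_t, FREELIST_SAVE) are invented for the example and are not the CPython API:

    #include <stdio.h>
    #include <stdlib.h>

    #define FREELIST_SAVE 16                /* cap, like MEMERRORS_SAVE */

    typedef struct obj {
        struct obj *next;                   /* reused as the free-list link */
        int payload;
    } obj_t;

    typedef struct {
        obj_t *freelist;                    /* lives in interpreter state, */
        int numfree;                        /* not in a C static variable  */
    } interp_state;

    static obj_t *obj_new(interp_state *st)
    {
        if (st->freelist != NULL) {         /* reuse a cached object */
            obj_t *o = st->freelist;
            st->freelist = o->next;
            st->numfree--;
            return o;
        }
        return malloc(sizeof(obj_t));
    }

    static void obj_dealloc(interp_state *st, obj_t *o)
    {
        if (st->numfree >= FREELIST_SAVE) { /* cache full: really free it */
            free(o);
            return;
        }
        o->next = st->freelist;             /* push onto this interpreter's cache */
        st->freelist = o;
        st->numfree++;
    }

    static void interp_fini(interp_state *st)
    {
        while (st->freelist != NULL) {      /* drain the cache at shutdown */
            obj_t *o = st->freelist;
            st->freelist = o->next;
            free(o);
        }
        st->numfree = 0;
    }

    int main(void)
    {
        interp_state a = {NULL, 0}, b = {NULL, 0};  /* two "interpreters" */
        obj_t *x = obj_new(&a);
        obj_dealloc(&a, x);                 /* cached in a; b is unaffected */
        printf("a.numfree=%d, b.numfree=%d\n", a.numfree, b.numfree);
        interp_fini(&a);
        interp_fini(&b);
        return 0;
    }

Keeping the cache in interpreter state rather than in a C static is what lets each (sub)interpreter drain it independently at finalization, which is what the exceptions.c hunks below do via _PyExc_Fini(tstate).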
files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/exceptions.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index c22bea75d2795..435a72a522011 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -152,6 +152,13 @@ struct _Py_context_state { int numfree; }; +struct _Py_exc_state { + // The dict mapping from errno codes to OSError subclasses + PyObject *errnomap; + PyBaseExceptionObject *memerrors_freelist; + int memerrors_numfree; +}; + /* interpreter state */ @@ -251,6 +258,7 @@ struct _is { struct _Py_frame_state frame; struct _Py_async_gen_state async_gen; struct _Py_context_state context; + struct _Py_exc_state exc_state; }; /* Used by _PyImport_Cleanup() */ diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 30ba48423f9ec..cd470441817a2 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -43,7 +43,7 @@ extern PyStatus _PySys_Create( extern PyStatus _PySys_ReadPreinitWarnOptions(PyWideStringList *options); extern PyStatus _PySys_ReadPreinitXOptions(PyConfig *config); extern int _PySys_InitMain(PyThreadState *tstate); -extern PyStatus _PyExc_Init(void); +extern PyStatus _PyExc_Init(PyThreadState *tstate); extern PyStatus _PyErr_Init(void); extern PyStatus _PyBuiltins_AddExceptions(PyObject * bltinmod); extern PyStatus _PyImportHooks_Init(PyThreadState *tstate); @@ -69,7 +69,7 @@ extern void _PyAsyncGen_Fini(PyThreadState *tstate); extern void PyOS_FiniInterrupts(void); -extern void _PyExc_Fini(void); +extern void _PyExc_Fini(PyThreadState *tstate); extern void _PyImport_Fini(void); extern void _PyImport_Fini2(void); extern void _PyGC_Fini(PyThreadState *tstate); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index a62383d2093ec..9b94bcc016927 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -1,10 +1,9 @@ Each interpreter now its has own free lists, singletons and caches: * Free lists: float, tuple, list, dict, frame, context, - asynchronous generator. + asynchronous generator, MemoryError. * Singletons: empty tuple, empty bytes string, single byte character. * Slice cache. They are no longer shared by all interpreters. 
- diff --git a/Objects/exceptions.c b/Objects/exceptions.c index db5e3da12b00f..1195ba17922dd 100644 --- a/Objects/exceptions.c +++ b/Objects/exceptions.c @@ -19,8 +19,13 @@ PyObject *PyExc_IOError = NULL; PyObject *PyExc_WindowsError = NULL; #endif -/* The dict map from errno codes to OSError subclasses */ -static PyObject *errnomap = NULL; + +static struct _Py_exc_state* +get_exc_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->exc_state; +} /* NOTE: If the exception class hierarchy changes, don't forget to update @@ -985,10 +990,11 @@ OSError_new(PyTypeObject *type, PyObject *args, PyObject *kwds) )) goto error; + struct _Py_exc_state *state = get_exc_state(); if (myerrno && PyLong_Check(myerrno) && - errnomap && (PyObject *) type == PyExc_OSError) { + state->errnomap && (PyObject *) type == PyExc_OSError) { PyObject *newtype; - newtype = PyDict_GetItemWithError(errnomap, myerrno); + newtype = PyDict_GetItemWithError(state->errnomap, myerrno); if (newtype) { assert(PyType_Check(newtype)); type = (PyTypeObject *) newtype; @@ -2274,8 +2280,6 @@ SimpleExtendsException(PyExc_Exception, ReferenceError, */ #define MEMERRORS_SAVE 16 -static PyBaseExceptionObject *memerrors_freelist = NULL; -static int memerrors_numfree = 0; static PyObject * MemoryError_new(PyTypeObject *type, PyObject *args, PyObject *kwds) @@ -2284,16 +2288,22 @@ MemoryError_new(PyTypeObject *type, PyObject *args, PyObject *kwds) if (type != (PyTypeObject *) PyExc_MemoryError) return BaseException_new(type, args, kwds); - if (memerrors_freelist == NULL) + + struct _Py_exc_state *state = get_exc_state(); + if (state->memerrors_freelist == NULL) { return BaseException_new(type, args, kwds); + } + /* Fetch object from freelist and revive it */ - self = memerrors_freelist; + self = state->memerrors_freelist; self->args = PyTuple_New(0); /* This shouldn't happen since the empty tuple is persistent */ - if (self->args == NULL) + if (self->args == NULL) { return NULL; - memerrors_freelist = (PyBaseExceptionObject *) self->dict; - memerrors_numfree--; + } + + state->memerrors_freelist = (PyBaseExceptionObject *) self->dict; + state->memerrors_numfree--; self->dict = NULL; _Py_NewReference((PyObject *)self); _PyObject_GC_TRACK(self); @@ -2305,12 +2315,15 @@ MemoryError_dealloc(PyBaseExceptionObject *self) { _PyObject_GC_UNTRACK(self); BaseException_clear(self); - if (memerrors_numfree >= MEMERRORS_SAVE) + + struct _Py_exc_state *state = get_exc_state(); + if (state->memerrors_numfree >= MEMERRORS_SAVE) { Py_TYPE(self)->tp_free((PyObject *)self); + } else { - self->dict = (PyObject *) memerrors_freelist; - memerrors_freelist = self; - memerrors_numfree++; + self->dict = (PyObject *) state->memerrors_freelist; + state->memerrors_freelist = self; + state->memerrors_numfree++; } } @@ -2335,11 +2348,11 @@ preallocate_memerrors(void) } static void -free_preallocated_memerrors(void) +free_preallocated_memerrors(struct _Py_exc_state *state) { - while (memerrors_freelist != NULL) { - PyObject *self = (PyObject *) memerrors_freelist; - memerrors_freelist = (PyBaseExceptionObject *) memerrors_freelist->dict; + while (state->memerrors_freelist != NULL) { + PyObject *self = (PyObject *) state->memerrors_freelist; + state->memerrors_freelist = (PyBaseExceptionObject *)state->memerrors_freelist->dict; Py_TYPE(self)->tp_free((PyObject *)self); } } @@ -2507,8 +2520,10 @@ SimpleExtendsException(PyExc_Warning, ResourceWarning, #endif /* MS_WINDOWS */ PyStatus -_PyExc_Init(void) +_PyExc_Init(PyThreadState 
*tstate) { + struct _Py_exc_state *state = &tstate->interp->exc_state; + #define PRE_INIT(TYPE) \ if (!(_PyExc_ ## TYPE.tp_flags & Py_TPFLAGS_READY)) { \ if (PyType_Ready(&_PyExc_ ## TYPE) < 0) { \ @@ -2521,7 +2536,7 @@ _PyExc_Init(void) do { \ PyObject *_code = PyLong_FromLong(CODE); \ assert(_PyObject_RealIsSubclass(PyExc_ ## TYPE, PyExc_OSError)); \ - if (!_code || PyDict_SetItem(errnomap, _code, PyExc_ ## TYPE)) \ + if (!_code || PyDict_SetItem(state->errnomap, _code, PyExc_ ## TYPE)) \ return _PyStatus_ERR("errmap insertion problem."); \ Py_DECREF(_code); \ } while (0) @@ -2595,15 +2610,14 @@ _PyExc_Init(void) PRE_INIT(TimeoutError); if (preallocate_memerrors() < 0) { - return _PyStatus_ERR("Could not preallocate MemoryError object"); + return _PyStatus_NO_MEMORY(); } /* Add exceptions to errnomap */ - if (!errnomap) { - errnomap = PyDict_New(); - if (!errnomap) { - return _PyStatus_ERR("Cannot allocate map from errnos to OSError subclasses"); - } + assert(state->errnomap == NULL); + state->errnomap = PyDict_New(); + if (!state->errnomap) { + return _PyStatus_NO_MEMORY(); } ADD_ERRNO(BlockingIOError, EAGAIN); @@ -2741,10 +2755,11 @@ _PyBuiltins_AddExceptions(PyObject *bltinmod) } void -_PyExc_Fini(void) +_PyExc_Fini(PyThreadState *tstate) { - free_preallocated_memerrors(); - Py_CLEAR(errnomap); + struct _Py_exc_state *state = &tstate->interp->exc_state; + free_preallocated_memerrors(state); + Py_CLEAR(state->errnomap); } /* Helper to do the equivalent of "raise X from Y" in C, but always using diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 09945a8f7a6a0..f0b40b3aa68e3 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -602,7 +602,7 @@ pycore_init_types(PyThreadState *tstate) } } - status = _PyExc_Init(); + status = _PyExc_Init(tstate); if (_PyStatus_EXCEPTION(status)) { return status; } @@ -1249,6 +1249,7 @@ flush_std_files(void) static void finalize_interp_types(PyThreadState *tstate, int is_main_interp) { + _PyExc_Fini(tstate); _PyFrame_Fini(tstate); _PyAsyncGen_Fini(tstate); _PyContext_Fini(tstate); @@ -1289,10 +1290,6 @@ finalize_interp_clear(PyThreadState *tstate) _PyWarnings_Fini(tstate->interp); - if (is_main_interp) { - _PyExc_Fini(); - } - finalize_interp_types(tstate, is_main_interp); } From webhook-mailer at python.org Tue Jun 23 17:50:05 2020 From: webhook-mailer at python.org (Anthony Sottile) Date: Tue, 23 Jun 2020 21:50:05 -0000 Subject: [Python-checkins] bpo-31938: Fix default-value signatures of several functions in the select module (GH-21066) Message-ID: https://github.com/python/cpython/commit/d051801052211b533c46a593b1c1bccf649a171c commit: d051801052211b533c46a593b1c1bccf649a171c branch: master author: Anthony Sottile committer: GitHub date: 2020-06-23T23:49:56+02:00 summary: bpo-31938: Fix default-value signatures of several functions in the select module (GH-21066) files: A Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst M Modules/clinic/selectmodule.c.h M Modules/selectmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst b/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst new file mode 100644 index 0000000000000..0488e94d42e8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst @@ -0,0 +1 @@ +Fix default-value signatures of several functions in the :mod:`select` module - by Anthony Sottile. 
diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index cd7f3846da695..3a06d6d0ec900 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -65,7 +65,8 @@ select_select(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) PyDoc_STRVAR(select_poll_register__doc__, -"register($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"register($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Register a file descriptor with the polling object.\n" @@ -226,7 +227,8 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) PyDoc_STRVAR(select_devpoll_register__doc__, -"register($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"register($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Register a file descriptor with the polling object.\n" @@ -275,7 +277,8 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) PyDoc_STRVAR(select_devpoll_modify__doc__, -"modify($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"modify($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Modify a possible already registered file descriptor.\n" @@ -630,7 +633,8 @@ select_epoll_fromfd(PyTypeObject *type, PyObject *arg) #if defined(HAVE_EPOLL) PyDoc_STRVAR(select_epoll_register__doc__, -"register($self, /, fd, eventmask=EPOLLIN | EPOLLPRI | EPOLLOUT)\n" +"register($self, /, fd,\n" +" eventmask=select.EPOLLIN | select.EPOLLPRI | select.EPOLLOUT)\n" "--\n" "\n" "Registers a new fd or raises an OSError if the fd is already registered.\n" @@ -1175,4 +1179,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=a055330869acbd16 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=7144233c42e18279 input=a9049054013a1b77]*/ diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index adf014fac43d4..13ffe09c6d4f8 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -458,7 +458,7 @@ select.poll.register fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -467,7 +467,7 @@ Register a file descriptor with the polling object. 
static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=0dc7173c800a4a65 input=f18711d9bb021e25]*/ +/*[clinic end generated code: output=0dc7173c800a4a65 input=34e16cfb28d3c900]*/ { PyObject *key, *value; int err; @@ -845,7 +845,7 @@ select.devpoll.register fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -855,7 +855,7 @@ Register a file descriptor with the polling object. static PyObject * select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=6e07fe8b74abba0c input=5bd7cacc47a8ee46]*/ +/*[clinic end generated code: output=6e07fe8b74abba0c input=22006fabe9567522]*/ { return internal_devpoll_register(self, fd, eventmask, 0); } @@ -866,7 +866,7 @@ select.devpoll.modify fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -876,7 +876,7 @@ Modify a possible already registered file descriptor. static PyObject * select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=bc2e6d23aaff98b4 input=48a820fc5967165d]*/ +/*[clinic end generated code: output=bc2e6d23aaff98b4 input=09fa335db7cdc09e]*/ { return internal_devpoll_register(self, fd, eventmask, 1); } @@ -1475,7 +1475,7 @@ select.epoll.register fd: fildes the target file descriptor of the operation - eventmask: unsigned_int(c_default="EPOLLIN | EPOLLPRI | EPOLLOUT", bitwise=True) = EPOLLIN | EPOLLPRI | EPOLLOUT + eventmask: unsigned_int(c_default="EPOLLIN | EPOLLPRI | EPOLLOUT", bitwise=True) = select.EPOLLIN | select.EPOLLPRI | select.EPOLLOUT a bit set composed of the various EPOLL constants Registers a new fd or raises an OSError if the fd is already registered. @@ -1486,7 +1486,7 @@ The epoll interface supports all file descriptors that support poll. static PyObject * select_epoll_register_impl(pyEpoll_Object *self, int fd, unsigned int eventmask) -/*[clinic end generated code: output=318e5e6386520599 input=6cf699c152dd8ca9]*/ +/*[clinic end generated code: output=318e5e6386520599 input=a5071b71edfe3578]*/ { return pyepoll_internal_ctl(self->epfd, EPOLL_CTL_ADD, fd, eventmask); } From webhook-mailer at python.org Tue Jun 23 18:10:48 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 22:10:48 -0000 Subject: [Python-checkins] bpo-40521: Make empty Unicode string per interpreter (GH-21096) Message-ID: https://github.com/python/cpython/commit/f363d0a6e9cfa50677a6de203735fbc0d06c2f49 commit: f363d0a6e9cfa50677a6de203735fbc0d06c2f49 branch: master author: Victor Stinner committer: GitHub date: 2020-06-24T00:10:40+02:00 summary: bpo-40521: Make empty Unicode string per interpreter (GH-21096) Each interpreter now has its own empty Unicode string singleton. 
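For illustration, a minimal self-contained C sketch of caching an empty-string singleton in per-interpreter state rather than in a C static, which is the pattern the diff below applies to the real PyUnicode object. The str_t and unicode_state types here are invented stand-ins, not the actual implementation:

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    typedef struct {
        size_t len;
        char data[1];                   /* '\0'-terminated payload */
    } str_t;

    typedef struct {
        str_t *empty;                   /* per-interpreter singleton */
    } unicode_state;

    static str_t *str_alloc(size_t len)
    {
        str_t *s = malloc(sizeof(str_t) + len);
        s->len = len;
        s->data[len] = '\0';
        return s;
    }

    /* Like unicode_get_empty()/unicode_new_empty(): create the singleton
       once per interpreter, then always hand back the same object. */
    static str_t *get_empty(unicode_state *st)
    {
        if (st->empty == NULL) {
            st->empty = str_alloc(0);
        }
        return st->empty;
    }

    static str_t *str_new(unicode_state *st, const char *src, size_t len)
    {
        if (len == 0) {                 /* the zero-length shortcut */
            return get_empty(st);
        }
        str_t *s = str_alloc(len);
        memcpy(s->data, src, len);
        return s;
    }

    static void unicode_fini(unicode_state *st)
    {
        free(st->empty);                /* cleared per interpreter, as in */
        st->empty = NULL;               /* _PyUnicode_Fini(tstate)        */
    }

    int main(void)
    {
        unicode_state st = {NULL};
        str_t *a = str_new(&st, "", 0);
        str_t *b = str_new(&st, "", 0);
        printf("same object: %s\n", a == b ? "yes" : "no");
        unicode_fini(&st);
        return 0;
    }

The real implementation also manages reference counts on the singleton; the sketch only shows where the singleton lives and how the empty-length shortcut reuses it.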
files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/stringlib/asciilib.h M Objects/stringlib/partition.h M Objects/stringlib/stringdefs.h M Objects/stringlib/ucs1lib.h M Objects/stringlib/ucs2lib.h M Objects/stringlib/ucs4lib.h M Objects/stringlib/unicodedefs.h M Objects/unicodeobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index 435a72a522011..d8947e700f84e 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -71,6 +71,8 @@ struct _Py_bytes_state { }; struct _Py_unicode_state { + // The empty Unicode object is a singleton to improve performance. + PyObject *empty; struct _Py_unicode_fs_codec fs_codec; }; diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index cd470441817a2..f29c7cb9f392c 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -31,7 +31,7 @@ PyAPI_FUNC(int) _Py_IsLocaleCoercionTarget(const char *ctype_loc); /* Various one-time initializers */ -extern PyStatus _PyUnicode_Init(void); +extern PyStatus _PyUnicode_Init(PyThreadState *tstate); extern int _PyStructSequence_Init(void); extern int _PyLong_Init(PyThreadState *tstate); extern PyStatus _PyFaulthandler_Init(int enable); diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index 9b94bcc016927..e970551f531d0 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -2,7 +2,7 @@ Each interpreter now its has own free lists, singletons and caches: * Free lists: float, tuple, list, dict, frame, context, asynchronous generator, MemoryError. -* Singletons: empty tuple, empty bytes string, +* Singletons: empty tuple, empty bytes string, empty Unicode string, single byte character. * Slice cache. 
diff --git a/Objects/stringlib/asciilib.h b/Objects/stringlib/asciilib.h index 8599d38a5a7f5..7749e8fb33982 100644 --- a/Objects/stringlib/asciilib.h +++ b/Objects/stringlib/asciilib.h @@ -11,7 +11,6 @@ #define STRINGLIB_CHAR Py_UCS1 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/partition.h b/Objects/stringlib/partition.h index 3731df56987fd..bcc217697b2e9 100644 --- a/Objects/stringlib/partition.h +++ b/Objects/stringlib/partition.h @@ -1,9 +1,14 @@ /* stringlib: partition implementation */ #ifndef STRINGLIB_FASTSEARCH_H -#error must include "stringlib/fastsearch.h" before including this module +# error must include "stringlib/fastsearch.h" before including this module #endif +#if !STRINGLIB_MUTABLE && !defined(STRINGLIB_GET_EMPTY) +# error "STRINGLIB_GET_EMPTY must be defined if STRINGLIB_MUTABLE is zero" +#endif + + Py_LOCAL_INLINE(PyObject*) STRINGLIB(partition)(PyObject* str_obj, const STRINGLIB_CHAR* str, Py_ssize_t str_len, diff --git a/Objects/stringlib/stringdefs.h b/Objects/stringlib/stringdefs.h index c12ecc59e5c6d..88641b25d47c6 100644 --- a/Objects/stringlib/stringdefs.h +++ b/Objects/stringlib/stringdefs.h @@ -1,10 +1,6 @@ #ifndef STRINGLIB_STRINGDEFS_H #define STRINGLIB_STRINGDEFS_H -#ifndef STRINGLIB_GET_EMPTY -# error "STRINGLIB_GET_EMPTY macro must be defined" -#endif - /* this is sort of a hack. there's at least one place (formatting floats) where some stringlib code takes a different path if it's compiled as unicode. */ diff --git a/Objects/stringlib/ucs1lib.h b/Objects/stringlib/ucs1lib.h index bdf30356b8457..5b0b8a025e808 100644 --- a/Objects/stringlib/ucs1lib.h +++ b/Objects/stringlib/ucs1lib.h @@ -11,7 +11,6 @@ #define STRINGLIB_CHAR Py_UCS1 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/ucs2lib.h b/Objects/stringlib/ucs2lib.h index 9d6888801867d..6af01511c5f8a 100644 --- a/Objects/stringlib/ucs2lib.h +++ b/Objects/stringlib/ucs2lib.h @@ -11,7 +11,6 @@ #define STRINGLIB_CHAR Py_UCS2 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/ucs4lib.h b/Objects/stringlib/ucs4lib.h index c7dfa527433e3..39071a0cdf0cd 100644 --- a/Objects/stringlib/ucs4lib.h +++ b/Objects/stringlib/ucs4lib.h @@ -11,7 +11,6 @@ #define STRINGLIB_CHAR Py_UCS4 #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/stringlib/unicodedefs.h b/Objects/stringlib/unicodedefs.h index e4d4163afc2f9..5ea79cd4f50ac 100644 --- a/Objects/stringlib/unicodedefs.h +++ b/Objects/stringlib/unicodedefs.h @@ -13,7 +13,6 @@ #define STRINGLIB_CHAR Py_UNICODE #define STRINGLIB_TYPE_NAME "unicode" #define STRINGLIB_PARSE_CODE "U" -#define STRINGLIB_GET_EMPTY() unicode_empty #define STRINGLIB_ISSPACE 
Py_UNICODE_ISSPACE #define STRINGLIB_ISLINEBREAK BLOOM_LINEBREAK #define STRINGLIB_ISDECIMAL Py_UNICODE_ISDECIMAL diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 1433848c81f8e..06ca7a5751d2f 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -222,26 +222,43 @@ extern "C" { static PyObject *interned = NULL; #endif -/* The empty Unicode object is shared to improve performance. */ -static PyObject *unicode_empty = NULL; +static struct _Py_unicode_state* +get_unicode_state(void) +{ + PyInterpreterState *interp = _PyInterpreterState_GET(); + return &interp->unicode; +} -#define _Py_INCREF_UNICODE_EMPTY() \ - do { \ - if (unicode_empty != NULL) \ - Py_INCREF(unicode_empty); \ - else { \ - unicode_empty = PyUnicode_New(0, 0); \ - if (unicode_empty != NULL) { \ - Py_INCREF(unicode_empty); \ - assert(_PyUnicode_CheckConsistency(unicode_empty, 1)); \ - } \ - } \ - } while (0) -#define _Py_RETURN_UNICODE_EMPTY() \ - do { \ - _Py_INCREF_UNICODE_EMPTY(); \ - return unicode_empty; \ +// Return a borrowed reference to the empty string singleton. +// Return NULL if the singleton was not created yet. +static inline PyObject* unicode_get_empty(void) +{ + struct _Py_unicode_state *state = get_unicode_state(); + return state->empty; +} + +static inline PyObject* unicode_new_empty(void) +{ + struct _Py_unicode_state *state = get_unicode_state(); + PyObject *empty = state->empty; + if (empty != NULL) { + Py_INCREF(empty); + } + else { + empty = PyUnicode_New(0, 0); + if (empty != NULL) { + Py_INCREF(empty); + assert(_PyUnicode_CheckConsistency(empty, 1)); + state->empty = empty; + } + } + return empty; +} + +#define _Py_RETURN_UNICODE_EMPTY() \ + do { \ + return unicode_new_empty(); \ } while (0) static inline void @@ -676,11 +693,15 @@ unicode_result_ready(PyObject *unicode) length = PyUnicode_GET_LENGTH(unicode); if (length == 0) { - if (unicode != unicode_empty) { + PyObject *empty = unicode_get_empty(); + if (unicode != empty) { Py_DECREF(unicode); - _Py_RETURN_UNICODE_EMPTY(); + + Py_INCREF(empty); + return empty; } - return unicode_empty; + // unicode is the empty string singleton + return unicode; } #ifdef LATIN1_SINGLETONS @@ -864,7 +885,7 @@ xmlcharrefreplace(_PyBytesWriter *writer, char *str, to keep things simple, we use a single bitmask, using the least 5 bits from each unicode characters as the bit index. */ -/* the linebreak mask is set up by Unicode_Init below */ +/* the linebreak mask is set up by _PyUnicode_Init() below */ #if LONG_BIT >= 128 #define BLOOM_WIDTH 128 @@ -938,6 +959,8 @@ ensure_unicode(PyObject *obj) /* Compilation of templated routines */ +#define STRINGLIB_GET_EMPTY() unicode_get_empty() + #include "stringlib/asciilib.h" #include "stringlib/fastsearch.h" #include "stringlib/partition.h" @@ -986,6 +1009,8 @@ _Py_COMP_DIAG_IGNORE_DEPR_DECLS #include "stringlib/undef.h" _Py_COMP_DIAG_POP +#undef STRINGLIB_GET_EMPTY + /* --- Unicode Object ----------------------------------------------------- */ static inline Py_ssize_t @@ -1234,9 +1259,12 @@ _PyUnicode_New(Py_ssize_t length) size_t new_size; /* Optimization for empty strings */ - if (length == 0 && unicode_empty != NULL) { - Py_INCREF(unicode_empty); - return (PyUnicodeObject*)unicode_empty; + if (length == 0) { + PyObject *empty = unicode_get_empty(); + if (empty != NULL) { + Py_INCREF(empty); + return (PyUnicodeObject *)empty; + } } /* Ensure we won't overflow the size. 
*/ @@ -1386,6 +1414,15 @@ _PyUnicode_Dump(PyObject *op) PyObject * PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) { + /* Optimization for empty strings */ + if (size == 0) { + PyObject *empty = unicode_get_empty(); + if (empty != NULL) { + Py_INCREF(empty); + return empty; + } + } + PyObject *obj; PyCompactUnicodeObject *unicode; void *data; @@ -1394,12 +1431,6 @@ PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) Py_ssize_t char_size; Py_ssize_t struct_size; - /* Optimization for empty strings */ - if (size == 0 && unicode_empty != NULL) { - Py_INCREF(unicode_empty); - return unicode_empty; - } - is_ascii = 0; is_sharing = 0; struct_size = sizeof(PyCompactUnicodeObject); @@ -1970,7 +2001,8 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - if (unicode == unicode_empty) { + struct _Py_unicode_state *state = get_unicode_state(); + if (unicode == state->empty) { return 1; } #ifdef LATIN1_SINGLETONS @@ -2026,10 +2058,10 @@ unicode_resize(PyObject **p_unicode, Py_ssize_t length) return 0; if (length == 0) { - _Py_INCREF_UNICODE_EMPTY(); - if (!unicode_empty) + PyObject *empty = unicode_new_empty(); + if (!empty) return -1; - Py_SETREF(*p_unicode, unicode_empty); + Py_SETREF(*p_unicode, empty); return 0; } @@ -10836,10 +10868,10 @@ replace(PyObject *self, PyObject *str1, } new_size = slen + n * (len2 - len1); if (new_size == 0) { - _Py_INCREF_UNICODE_EMPTY(); - if (!unicode_empty) + PyObject *empty = unicode_new_empty(); + if (!empty) goto error; - u = unicode_empty; + u = empty; goto done; } if (new_size > (PY_SSIZE_T_MAX / rkind)) { @@ -11497,10 +11529,13 @@ PyUnicode_Concat(PyObject *left, PyObject *right) return NULL; /* Shortcuts */ - if (left == unicode_empty) + PyObject *empty = unicode_get_empty(); // Borrowed reference + if (left == empty) { return PyUnicode_FromObject(right); - if (right == unicode_empty) + } + if (right == empty) { return PyUnicode_FromObject(left); + } left_len = PyUnicode_GET_LENGTH(left); right_len = PyUnicode_GET_LENGTH(right); @@ -11551,14 +11586,16 @@ PyUnicode_Append(PyObject **p_left, PyObject *right) goto error; /* Shortcuts */ - if (left == unicode_empty) { + PyObject *empty = unicode_get_empty(); // Borrowed reference + if (left == empty) { Py_DECREF(left); Py_INCREF(right); *p_left = right; return; } - if (right == unicode_empty) + if (right == empty) { return; + } left_len = PyUnicode_GET_LENGTH(left); right_len = PyUnicode_GET_LENGTH(right); @@ -13255,12 +13292,12 @@ PyUnicode_Partition(PyObject *str_obj, PyObject *sep_obj) len1 = PyUnicode_GET_LENGTH(str_obj); len2 = PyUnicode_GET_LENGTH(sep_obj); if (kind1 < kind2 || len1 < len2) { - _Py_INCREF_UNICODE_EMPTY(); - if (!unicode_empty) + PyObject *empty = unicode_get_empty(); // Borrowed reference + if (!empty) { out = NULL; + } else { - out = PyTuple_Pack(3, str_obj, unicode_empty, unicode_empty); - Py_DECREF(unicode_empty); + out = PyTuple_Pack(3, str_obj, empty, empty); } return out; } @@ -13313,12 +13350,12 @@ PyUnicode_RPartition(PyObject *str_obj, PyObject *sep_obj) len1 = PyUnicode_GET_LENGTH(str_obj); len2 = PyUnicode_GET_LENGTH(sep_obj); if (kind1 < kind2 || len1 < len2) { - _Py_INCREF_UNICODE_EMPTY(); - if (!unicode_empty) + PyObject *empty = unicode_get_empty(); // Borrowed reference + if (!empty) { out = NULL; + } else { - out = PyTuple_Pack(3, unicode_empty, unicode_empty, str_obj); - Py_DECREF(unicode_empty); + out = PyTuple_Pack(3, empty, empty, str_obj); } return out; } @@ -15538,10 +15575,10 @@ PyTypeObject PyUnicode_Type = { /* Initialize the 
Unicode implementation */ PyStatus -_PyUnicode_Init(void) +_PyUnicode_Init(PyThreadState *tstate) { /* XXX - move this array to unicodectype.c ? */ - Py_UCS2 linebreak[] = { + const Py_UCS2 linebreak[] = { 0x000A, /* LINE FEED */ 0x000D, /* CARRIAGE RETURN */ 0x001C, /* FILE SEPARATOR */ @@ -15553,29 +15590,31 @@ _PyUnicode_Init(void) }; /* Init the implementation */ - _Py_INCREF_UNICODE_EMPTY(); - if (!unicode_empty) { - return _PyStatus_ERR("Can't create empty string"); + PyObject *empty = unicode_new_empty(); + if (!empty) { + return _PyStatus_NO_MEMORY(); } - Py_DECREF(unicode_empty); + Py_DECREF(empty); - if (PyType_Ready(&PyUnicode_Type) < 0) { - return _PyStatus_ERR("Can't initialize unicode type"); - } + if (_Py_IsMainInterpreter(tstate)) { + /* initialize the linebreak bloom filter */ + bloom_linebreak = make_bloom_mask( + PyUnicode_2BYTE_KIND, linebreak, + Py_ARRAY_LENGTH(linebreak)); - /* initialize the linebreak bloom filter */ - bloom_linebreak = make_bloom_mask( - PyUnicode_2BYTE_KIND, linebreak, - Py_ARRAY_LENGTH(linebreak)); + if (PyType_Ready(&PyUnicode_Type) < 0) { + return _PyStatus_ERR("Can't initialize unicode type"); + } - if (PyType_Ready(&EncodingMapType) < 0) { - return _PyStatus_ERR("Can't initialize encoding map type"); - } - if (PyType_Ready(&PyFieldNameIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize field name iterator type"); - } - if (PyType_Ready(&PyFormatterIter_Type) < 0) { - return _PyStatus_ERR("Can't initialize formatter iter type"); + if (PyType_Ready(&EncodingMapType) < 0) { + return _PyStatus_ERR("Can't initialize encoding map type"); + } + if (PyType_Ready(&PyFieldNameIter_Type) < 0) { + return _PyStatus_ERR("Can't initialize field name iterator type"); + } + if (PyType_Ready(&PyFormatterIter_Type) < 0) { + return _PyStatus_ERR("Can't initialize formatter iter type"); + } } return _PyStatus_OK(); } @@ -16205,7 +16244,10 @@ _PyUnicode_EnableLegacyWindowsFSEncoding(void) void _PyUnicode_Fini(PyThreadState *tstate) { - if (_Py_IsMainInterpreter(tstate)) { + struct _Py_unicode_state *state = &tstate->interp->unicode; + + int is_main_interp = _Py_IsMainInterpreter(tstate); + if (is_main_interp) { #if defined(WITH_VALGRIND) || defined(__INSURE__) /* Insure++ is a memory analysis tool that aids in discovering * memory leaks and other memory problems. 
On Python exit, the @@ -16218,9 +16260,11 @@ _PyUnicode_Fini(PyThreadState *tstate) */ unicode_release_interned(); #endif /* __INSURE__ */ + } - Py_CLEAR(unicode_empty); + Py_CLEAR(state->empty); + if (is_main_interp) { #ifdef LATIN1_SINGLETONS for (Py_ssize_t i = 0; i < 256; i++) { Py_CLEAR(unicode_latin1[i]); diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index f0b40b3aa68e3..eda4c6ad7e474 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -595,11 +595,9 @@ pycore_init_types(PyThreadState *tstate) return _PyStatus_ERR("can't init longs"); } - if (is_main_interp) { - status = _PyUnicode_Init(); - if (_PyStatus_EXCEPTION(status)) { - return status; - } + status = _PyUnicode_Init(tstate); + if (_PyStatus_EXCEPTION(status)) { + return status; } status = _PyExc_Init(tstate); From webhook-mailer at python.org Tue Jun 23 18:17:07 2020 From: webhook-mailer at python.org (Anthony Sottile) Date: Tue, 23 Jun 2020 22:17:07 -0000 Subject: [Python-checkins] [3.8] bpo-31938: Fix default-value signatures of several functions in the select module (GH-21066) (GH-21098) Message-ID: https://github.com/python/cpython/commit/60cbdc81d1bd5c175ff890ee4cfdc85c10090b75 commit: 60cbdc81d1bd5c175ff890ee4cfdc85c10090b75 branch: 3.8 author: Anthony Sottile committer: GitHub date: 2020-06-23T15:17:02-07:00 summary: [3.8] bpo-31938: Fix default-value signatures of several functions in the select module (GH-21066) (GH-21098) (cherry picked from commit d051801052211b533c46a593b1c1bccf649a171c) Automerge-Triggered-By: @vstinner files: A Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst M Modules/clinic/selectmodule.c.h M Modules/selectmodule.c diff --git a/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst b/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst new file mode 100644 index 0000000000000..0488e94d42e8c --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst @@ -0,0 +1 @@ +Fix default-value signatures of several functions in the :mod:`select` module - by Anthony Sottile. 
diff --git a/Modules/clinic/selectmodule.c.h b/Modules/clinic/selectmodule.c.h index 51855d96c37d7..a9e14840710c7 100644 --- a/Modules/clinic/selectmodule.c.h +++ b/Modules/clinic/selectmodule.c.h @@ -65,7 +65,8 @@ select_select(PyObject *module, PyObject *const *args, Py_ssize_t nargs) #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) PyDoc_STRVAR(select_poll_register__doc__, -"register($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"register($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Register a file descriptor with the polling object.\n" @@ -226,7 +227,8 @@ select_poll_poll(pollObject *self, PyObject *const *args, Py_ssize_t nargs) #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) PyDoc_STRVAR(select_devpoll_register__doc__, -"register($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"register($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Register a file descriptor with the polling object.\n" @@ -275,7 +277,8 @@ select_devpoll_register(devpollObject *self, PyObject *const *args, Py_ssize_t n #if (defined(HAVE_POLL) && !defined(HAVE_BROKEN_POLL)) && defined(HAVE_SYS_DEVPOLL_H) PyDoc_STRVAR(select_devpoll_modify__doc__, -"modify($self, fd, eventmask=POLLIN | POLLPRI | POLLOUT, /)\n" +"modify($self, fd,\n" +" eventmask=select.POLLIN | select.POLLPRI | select.POLLOUT, /)\n" "--\n" "\n" "Modify a possible already registered file descriptor.\n" @@ -645,7 +648,8 @@ select_epoll_fromfd(PyTypeObject *type, PyObject *arg) #if defined(HAVE_EPOLL) PyDoc_STRVAR(select_epoll_register__doc__, -"register($self, /, fd, eventmask=EPOLLIN | EPOLLPRI | EPOLLOUT)\n" +"register($self, /, fd,\n" +" eventmask=select.EPOLLIN | select.EPOLLPRI | select.EPOLLOUT)\n" "--\n" "\n" "Registers a new fd or raises an OSError if the fd is already registered.\n" @@ -1215,4 +1219,4 @@ select_kqueue_control(kqueue_queue_Object *self, PyObject *const *args, Py_ssize #ifndef SELECT_KQUEUE_CONTROL_METHODDEF #define SELECT_KQUEUE_CONTROL_METHODDEF #endif /* !defined(SELECT_KQUEUE_CONTROL_METHODDEF) */ -/*[clinic end generated code: output=9b4b1e1cae1f3afb input=a9049054013a1b77]*/ +/*[clinic end generated code: output=86010dde10ca89c6 input=a9049054013a1b77]*/ diff --git a/Modules/selectmodule.c b/Modules/selectmodule.c index 31b64c4c8a45a..a71b64223d049 100644 --- a/Modules/selectmodule.c +++ b/Modules/selectmodule.c @@ -439,7 +439,7 @@ select.poll.register fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -448,7 +448,7 @@ Register a file descriptor with the polling object. 
static PyObject * select_poll_register_impl(pollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=0dc7173c800a4a65 input=f18711d9bb021e25]*/ +/*[clinic end generated code: output=0dc7173c800a4a65 input=34e16cfb28d3c900]*/ { PyObject *key, *value; int err; @@ -817,7 +817,7 @@ select.devpoll.register fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -827,7 +827,7 @@ Register a file descriptor with the polling object. static PyObject * select_devpoll_register_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=6e07fe8b74abba0c input=5bd7cacc47a8ee46]*/ +/*[clinic end generated code: output=6e07fe8b74abba0c input=22006fabe9567522]*/ { return internal_devpoll_register(self, fd, eventmask, 0); } @@ -838,7 +838,7 @@ select.devpoll.modify fd: fildes either an integer, or an object with a fileno() method returning an int - eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = POLLIN | POLLPRI | POLLOUT + eventmask: unsigned_short(c_default="POLLIN | POLLPRI | POLLOUT") = select.POLLIN | select.POLLPRI | select.POLLOUT an optional bitmask describing the type of events to check for / @@ -848,7 +848,7 @@ Modify a possible already registered file descriptor. static PyObject * select_devpoll_modify_impl(devpollObject *self, int fd, unsigned short eventmask) -/*[clinic end generated code: output=bc2e6d23aaff98b4 input=48a820fc5967165d]*/ +/*[clinic end generated code: output=bc2e6d23aaff98b4 input=09fa335db7cdc09e]*/ { return internal_devpoll_register(self, fd, eventmask, 1); } @@ -1424,7 +1424,7 @@ select.epoll.register fd: fildes the target file descriptor of the operation - eventmask: unsigned_int(c_default="EPOLLIN | EPOLLPRI | EPOLLOUT", bitwise=True) = EPOLLIN | EPOLLPRI | EPOLLOUT + eventmask: unsigned_int(c_default="EPOLLIN | EPOLLPRI | EPOLLOUT", bitwise=True) = select.EPOLLIN | select.EPOLLPRI | select.EPOLLOUT a bit set composed of the various EPOLL constants Registers a new fd or raises an OSError if the fd is already registered. @@ -1435,7 +1435,7 @@ The epoll interface supports all file descriptors that support poll. static PyObject * select_epoll_register_impl(pyEpoll_Object *self, int fd, unsigned int eventmask) -/*[clinic end generated code: output=318e5e6386520599 input=6cf699c152dd8ca9]*/ +/*[clinic end generated code: output=318e5e6386520599 input=a5071b71edfe3578]*/ { return pyepoll_internal_ctl(self->epfd, EPOLL_CTL_ADD, fd, eventmask); } From webhook-mailer at python.org Tue Jun 23 18:34:16 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Tue, 23 Jun 2020 22:34:16 -0000 Subject: [Python-checkins] bpo-40521: Optimize PyUnicode_New(0, maxchar) (GH-21099) Message-ID: https://github.com/python/cpython/commit/90ed8a6d71b2d6e0853c14e8e6f85fe730a4329a commit: 90ed8a6d71b2d6e0853c14e8e6f85fe730a4329a branch: master author: Victor Stinner committer: GitHub date: 2020-06-24T00:34:07+02:00 summary: bpo-40521: Optimize PyUnicode_New(0, maxchar) (GH-21099) Functions of unicodeobject.c, like PyUnicode_New(), no longer check if the empty Unicode singleton has been initialized or not. Consider that it is always initialized. 
The Unicode API must not be used before _PyUnicode_Init() or after _PyUnicode_Fini(). files: M Objects/unicodeobject.c diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 06ca7a5751d2f..e4235b1aca3cf 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -231,28 +231,19 @@ get_unicode_state(void) // Return a borrowed reference to the empty string singleton. -// Return NULL if the singleton was not created yet. static inline PyObject* unicode_get_empty(void) { struct _Py_unicode_state *state = get_unicode_state(); + // unicode_get_empty() must not be called before _PyUnicode_Init() + // or after _PyUnicode_Fini() + assert(state->empty != NULL); return state->empty; } static inline PyObject* unicode_new_empty(void) { - struct _Py_unicode_state *state = get_unicode_state(); - PyObject *empty = state->empty; - if (empty != NULL) { - Py_INCREF(empty); - } - else { - empty = PyUnicode_New(0, 0); - if (empty != NULL) { - Py_INCREF(empty); - assert(_PyUnicode_CheckConsistency(empty, 1)); - state->empty = empty; - } - } + PyObject *empty = unicode_get_empty(); + Py_INCREF(empty); return empty; } @@ -696,12 +687,9 @@ unicode_result_ready(PyObject *unicode) PyObject *empty = unicode_get_empty(); if (unicode != empty) { Py_DECREF(unicode); - Py_INCREF(empty); - return empty; } - // unicode is the empty string singleton - return unicode; + return empty; } #ifdef LATIN1_SINGLETONS @@ -959,7 +947,7 @@ ensure_unicode(PyObject *obj) /* Compilation of templated routines */ -#define STRINGLIB_GET_EMPTY() unicode_get_empty() +#define STRINGLIB_GET_EMPTY() unicode_get_empty() #include "stringlib/asciilib.h" #include "stringlib/fastsearch.h" @@ -1260,11 +1248,7 @@ _PyUnicode_New(Py_ssize_t length) /* Optimization for empty strings */ if (length == 0) { - PyObject *empty = unicode_get_empty(); - if (empty != NULL) { - Py_INCREF(empty); - return (PyUnicodeObject *)empty; - } + return (PyUnicodeObject *)unicode_new_empty(); } /* Ensure we won't overflow the size. 
*/ @@ -1416,11 +1400,7 @@ PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) { /* Optimization for empty strings */ if (size == 0) { - PyObject *empty = unicode_get_empty(); - if (empty != NULL) { - Py_INCREF(empty); - return empty; - } + return unicode_new_empty(); } PyObject *obj; @@ -2001,8 +1981,7 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - struct _Py_unicode_state *state = get_unicode_state(); - if (unicode == state->empty) { + if (unicode == unicode_get_empty()) { return 1; } #ifdef LATIN1_SINGLETONS @@ -2059,8 +2038,6 @@ unicode_resize(PyObject **p_unicode, Py_ssize_t length) if (length == 0) { PyObject *empty = unicode_new_empty(); - if (!empty) - return -1; Py_SETREF(*p_unicode, empty); return 0; } @@ -10868,10 +10845,7 @@ replace(PyObject *self, PyObject *str1, } new_size = slen + n * (len2 - len1); if (new_size == 0) { - PyObject *empty = unicode_new_empty(); - if (!empty) - goto error; - u = empty; + u = unicode_new_empty(); goto done; } if (new_size > (PY_SSIZE_T_MAX / rkind)) { @@ -13293,13 +13267,7 @@ PyUnicode_Partition(PyObject *str_obj, PyObject *sep_obj) len2 = PyUnicode_GET_LENGTH(sep_obj); if (kind1 < kind2 || len1 < len2) { PyObject *empty = unicode_get_empty(); // Borrowed reference - if (!empty) { - out = NULL; - } - else { - out = PyTuple_Pack(3, str_obj, empty, empty); - } - return out; + return PyTuple_Pack(3, str_obj, empty, empty); } buf1 = PyUnicode_DATA(str_obj); buf2 = PyUnicode_DATA(sep_obj); @@ -13351,13 +13319,7 @@ PyUnicode_RPartition(PyObject *str_obj, PyObject *sep_obj) len2 = PyUnicode_GET_LENGTH(sep_obj); if (kind1 < kind2 || len1 < len2) { PyObject *empty = unicode_get_empty(); // Borrowed reference - if (!empty) { - out = NULL; - } - else { - out = PyTuple_Pack(3, empty, empty, str_obj); - } - return out; + return PyTuple_Pack(3, empty, empty, str_obj); } buf1 = PyUnicode_DATA(str_obj); buf2 = PyUnicode_DATA(sep_obj); @@ -15589,12 +15551,20 @@ _PyUnicode_Init(PyThreadState *tstate) 0x2029, /* PARAGRAPH SEPARATOR */ }; - /* Init the implementation */ - PyObject *empty = unicode_new_empty(); - if (!empty) { + // Use size=1 rather than size=0, so PyUnicode_New(0, maxchar) can be + // optimized to always use state->empty without having to check if it is + // NULL or not. + PyObject *empty = PyUnicode_New(1, 0); + if (empty == NULL) { return _PyStatus_NO_MEMORY(); } - Py_DECREF(empty); + PyUnicode_1BYTE_DATA(empty)[0] = 0; + _PyUnicode_LENGTH(empty) = 0; + assert(_PyUnicode_CheckConsistency(empty, 1)); + + struct _Py_unicode_state *state = &tstate->interp->unicode; + assert(state->empty == NULL); + state->empty = empty; if (_Py_IsMainInterpreter(tstate)) { /* initialize the linebreak bloom filter */ From webhook-mailer at python.org Tue Jun 23 19:32:31 2020 From: webhook-mailer at python.org (Nikita Nemkin) Date: Tue, 23 Jun 2020 23:32:31 -0000 Subject: [Python-checkins] bpo-41070: Simplify pyshellext.dll build (GH-21037) Message-ID: https://github.com/python/cpython/commit/bbf36e8903f8e86dcad8131c818e122537c30f9e commit: bbf36e8903f8e86dcad8131c818e122537c30f9e branch: master author: Nikita Nemkin committer: GitHub date: 2020-06-24T00:32:23+01:00 summary: bpo-41070: Simplify pyshellext.dll build (GH-21037) Replace MIDL-generated file with manual GUID definition. Use the same .def file for release and debug builds. 
Update setup build to support latest toolset files: D PC/pyshellext.idl D PC/pyshellext_d.def M PC/pyshellext.cpp M PC/pyshellext.def M PCbuild/pyshellext.vcxproj M PCbuild/pyshellext.vcxproj.filters M Tools/msi/bundle/bootstrap/pythonba.vcxproj diff --git a/PC/pyshellext.cpp b/PC/pyshellext.cpp index 019880264bee9..ffca169857c37 100644 --- a/PC/pyshellext.cpp +++ b/PC/pyshellext.cpp @@ -12,15 +12,14 @@ #include #include -#include "pyshellext_h.h" - #define DDWM_UPDATEWINDOW (WM_USER+3) static HINSTANCE hModule; static CLIPFORMAT cfDropDescription; static CLIPFORMAT cfDragWindow; -static const LPCWSTR CLASS_SUBKEY = L"Software\\Classes\\CLSID\\{BEA218D2-6950-497B-9434-61683EC065FE}"; +#define CLASS_GUID "{BEA218D2-6950-497B-9434-61683EC065FE}" +static const LPCWSTR CLASS_SUBKEY = L"Software\\Classes\\CLSID\\" CLASS_GUID; static const LPCWSTR DRAG_MESSAGE = L"Open with %1"; using namespace Microsoft::WRL; @@ -121,8 +120,7 @@ HRESULT FilenameListCchCopyW(STRSAFE_LPWSTR pszDest, size_t cchDest, LPCWSTR psz return hr; } - -class PyShellExt : public RuntimeClass< +class DECLSPEC_UUID(CLASS_GUID) PyShellExt : public RuntimeClass< RuntimeClassFlags, IDropTarget, IPersistFile @@ -483,7 +481,7 @@ class PyShellExt : public RuntimeClass< } STDMETHODIMP GetClassID(CLSID *pClassID) { - *pClassID = CLSID_PyShellExt; + *pClassID = __uuidof(PyShellExt); return S_OK; } }; diff --git a/PC/pyshellext.def b/PC/pyshellext.def index 5424bd1180d24..288a9adf982f1 100644 --- a/PC/pyshellext.def +++ b/PC/pyshellext.def @@ -1,4 +1,3 @@ -LIBRARY "pyshellext" EXPORTS DllRegisterServer PRIVATE DllUnregisterServer PRIVATE diff --git a/PC/pyshellext.idl b/PC/pyshellext.idl deleted file mode 100644 index c0a183876ad52..0000000000000 --- a/PC/pyshellext.idl +++ /dev/null @@ -1,12 +0,0 @@ -import "ocidl.idl"; - -[uuid(44039A76-3BDD-41C1-A31B-71C00202CE81), version(1.0)] -library PyShellExtLib -{ - [uuid(BEA218D2-6950-497B-9434-61683EC065FE), version(1.0)] - coclass PyShellExt - { - [default] interface IDropTarget; - interface IPersistFile; - } -}; \ No newline at end of file diff --git a/PC/pyshellext_d.def b/PC/pyshellext_d.def deleted file mode 100644 index 7d2148bb861ed..0000000000000 --- a/PC/pyshellext_d.def +++ /dev/null @@ -1,6 +0,0 @@ -LIBRARY "pyshellext_d" -EXPORTS - DllRegisterServer PRIVATE - DllUnregisterServer PRIVATE - DllGetClassObject PRIVATE - DllCanUnloadNow PRIVATE diff --git a/PCbuild/pyshellext.vcxproj b/PCbuild/pyshellext.vcxproj index 655054e3723b4..ea432d6bc9a3f 100644 --- a/PCbuild/pyshellext.vcxproj +++ b/PCbuild/pyshellext.vcxproj @@ -96,7 +96,7 @@ version.lib;shlwapi.lib;%(AdditionalDependencies) Console - ..\PC\pyshellext$(PyDebugExt).def + ..\PC\pyshellext.def true @@ -104,11 +104,9 @@ - - diff --git a/PCbuild/pyshellext.vcxproj.filters b/PCbuild/pyshellext.vcxproj.filters index 36d1d1655f5df..77cd3060857a8 100644 --- a/PCbuild/pyshellext.vcxproj.filters +++ b/PCbuild/pyshellext.vcxproj.filters @@ -15,11 +15,6 @@ Source Files - - - Source Files - - Resource Files @@ -29,8 +24,5 @@ Source Files - - Source Files - \ No newline at end of file diff --git a/Tools/msi/bundle/bootstrap/pythonba.vcxproj b/Tools/msi/bundle/bootstrap/pythonba.vcxproj index 75aad442a4403..ef71fe7da08d3 100644 --- a/Tools/msi/bundle/bootstrap/pythonba.vcxproj +++ b/Tools/msi/bundle/bootstrap/pythonba.vcxproj @@ -21,6 +21,9 @@ Release Win32 + v142 + v141 + v140 v140 v120 {7A09B132-B3EE-499B-A700-A4B2157FEA3D} @@ -47,6 +50,8 @@ 
comctl32.lib;gdiplus.lib;msimg32.lib;shlwapi.lib;wininet.lib;dutil.lib;balutil.lib;version.lib;uxtheme.lib;%(AdditionalDependencies) + $(WixInstallPath)sdk\vs2017\lib\x86 + $(WixInstallPath)sdk\vs2017\lib\x86 $(WixInstallPath)sdk\vs2015\lib\x86 $(WixInstallPath)sdk\vs2013\lib\x86 pythonba.def From webhook-mailer at python.org Tue Jun 23 20:22:29 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Jun 2020 00:22:29 -0000 Subject: [Python-checkins] bpo-40521: Make Unicode latin1 singletons per interpreter (GH-21101) Message-ID: https://github.com/python/cpython/commit/2f9ada96e0d420fed0d09a032b37197f08ef167a commit: 2f9ada96e0d420fed0d09a032b37197f08ef167a branch: master author: Victor Stinner committer: GitHub date: 2020-06-24T02:22:21+02:00 summary: bpo-40521: Make Unicode latin1 singletons per interpreter (GH-21101) Each interpreter now has its own Unicode latin1 singletons. Remove "ifdef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS" and "ifdef LATIN1_SINGLETONS": always enable latin1 singletons. Optimize unicode_result_ready(): only attempt to get a latin1 singleton for PyUnicode_1BYTE_KIND. files: M Include/internal/pycore_interp.h M Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst M Objects/unicodeobject.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index d8947e700f84e..bf1769e5ce2c2 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -73,6 +73,9 @@ struct _Py_bytes_state { struct _Py_unicode_state { // The empty Unicode object is a singleton to improve performance. PyObject *empty; + /* Single character Unicode strings in the Latin-1 range are being + shared as well. */ + PyObject *latin1[256]; struct _Py_unicode_fs_codec fs_codec; }; diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst index e970551f531d0..43226931ccc88 100644 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-20-01-17-34.bpo-40521.wvAehI.rst @@ -3,7 +3,7 @@ Each interpreter now its has own free lists, singletons and caches: * Free lists: float, tuple, list, dict, frame, context, asynchronous generator, MemoryError. * Singletons: empty tuple, empty bytes string, empty Unicode string, - single byte character. + single byte character, single Unicode (latin1) character. * Slice cache. They are no longer shared by all interpreters. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index e4235b1aca3cf..5ba99514d2969 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -303,17 +303,6 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, /* List of static strings. */ static _Py_Identifier *static_strings = NULL; -/* bpo-40521: Latin1 singletons are shared by all interpreters. */ -#ifndef EXPERIMENTAL_ISOLATED_SUBINTERPRETERS -# define LATIN1_SINGLETONS -#endif - -#ifdef LATIN1_SINGLETONS -/* Single character Unicode strings in the Latin-1 range are being - shared as well. 
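For illustration, a minimal self-contained C sketch of a 256-entry single-character cache kept in per-interpreter state, which is what the unicodeobject.c hunks below do for the Latin-1 singletons. The charobj and unicode_state names are invented for the example and are not CPython's:

    #include <stdio.h>
    #include <stdlib.h>

    typedef struct {
        char text[2];                     /* one character plus '\0' */
    } charobj;

    typedef struct {
        charobj *latin1[256];             /* one slot per Latin-1 code point */
    } unicode_state;                      /* kept in interpreter state */

    /* Like get_latin1_char(): return the cached object, creating it on demand. */
    static charobj *get_latin1_char(unicode_state *st, unsigned char ch)
    {
        charobj *o = st->latin1[ch];
        if (o == NULL) {
            o = malloc(sizeof(charobj));
            o->text[0] = (char)ch;
            o->text[1] = '\0';
            st->latin1[ch] = o;           /* cached for this interpreter only */
        }
        return o;
    }

    static void unicode_fini(unicode_state *st)
    {
        for (int i = 0; i < 256; i++) {   /* like the loop in _PyUnicode_Fini() */
            free(st->latin1[i]);
            st->latin1[i] = NULL;
        }
    }

    int main(void)
    {
        unicode_state st = {{NULL}};
        charobj *a = get_latin1_char(&st, 'x');
        charobj *b = get_latin1_char(&st, 'x');
        printf("cached: %s\n", a == b ? "yes" : "no");
        unicode_fini(&st);
        return 0;
    }

As with the other per-interpreter caches in this series, the array is cleared per interpreter at finalization, matching the Py_CLEAR(state->latin1[i]) loop added to _PyUnicode_Fini(tstate) in the diff below.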
*/ -static PyObject *unicode_latin1[256] = {NULL}; -#endif - /* Fast detection of the most frequent whitespace characters */ const unsigned char _Py_ascii_whitespace[] = { 0, 0, 0, 0, 0, 0, 0, 0, @@ -657,9 +646,8 @@ unicode_result_wchar(PyObject *unicode) if (len == 1) { wchar_t ch = _PyUnicode_WSTR(unicode)[0]; if ((Py_UCS4)ch < 256) { - PyObject *latin1_char = get_latin1_char((unsigned char)ch); Py_DECREF(unicode); - return latin1_char; + return get_latin1_char((unsigned char)ch); } } @@ -692,13 +680,13 @@ unicode_result_ready(PyObject *unicode) return empty; } -#ifdef LATIN1_SINGLETONS if (length == 1) { - const void *data = PyUnicode_DATA(unicode); int kind = PyUnicode_KIND(unicode); - Py_UCS4 ch = PyUnicode_READ(kind, data, 0); - if (ch < 256) { - PyObject *latin1_char = unicode_latin1[ch]; + if (kind == PyUnicode_1BYTE_KIND) { + Py_UCS1 *data = PyUnicode_1BYTE_DATA(unicode); + Py_UCS1 ch = data[0]; + struct _Py_unicode_state *state = get_unicode_state(); + PyObject *latin1_char = state->latin1[ch]; if (latin1_char != NULL) { if (unicode != latin1_char) { Py_INCREF(latin1_char); @@ -709,12 +697,14 @@ unicode_result_ready(PyObject *unicode) else { assert(_PyUnicode_CheckConsistency(unicode, 1)); Py_INCREF(unicode); - unicode_latin1[ch] = unicode; + state->latin1[ch] = unicode; return unicode; } } + else { + assert(PyUnicode_READ_CHAR(unicode, 0) >= 256); + } } -#endif assert(_PyUnicode_CheckConsistency(unicode, 1)); return unicode; @@ -1981,18 +1971,18 @@ unicode_dealloc(PyObject *unicode) static int unicode_is_singleton(PyObject *unicode) { - if (unicode == unicode_get_empty()) { + struct _Py_unicode_state *state = get_unicode_state(); + if (unicode == state->empty) { return 1; } -#ifdef LATIN1_SINGLETONS PyASCIIObject *ascii = (PyASCIIObject *)unicode; if (ascii->state.kind != PyUnicode_WCHAR_KIND && ascii->length == 1) { Py_UCS4 ch = PyUnicode_READ_CHAR(unicode, 0); - if (ch < 256 && unicode_latin1[ch] == unicode) + if (ch < 256 && state->latin1[ch] == unicode) { return 1; + } } -#endif return 0; } #endif @@ -2130,17 +2120,15 @@ unicode_write_cstr(PyObject *unicode, Py_ssize_t index, } static PyObject* -get_latin1_char(unsigned char ch) +get_latin1_char(Py_UCS1 ch) { - PyObject *unicode; + struct _Py_unicode_state *state = get_unicode_state(); -#ifdef LATIN1_SINGLETONS - unicode = unicode_latin1[ch]; + PyObject *unicode = state->latin1[ch]; if (unicode) { Py_INCREF(unicode); return unicode; } -#endif unicode = PyUnicode_New(1, ch); if (!unicode) { @@ -2150,10 +2138,8 @@ get_latin1_char(unsigned char ch) PyUnicode_1BYTE_DATA(unicode)[0] = ch; assert(_PyUnicode_CheckConsistency(unicode, 1)); -#ifdef LATIN1_SINGLETONS Py_INCREF(unicode); - unicode_latin1[ch] = unicode; -#endif + state->latin1[ch] = unicode; return unicode; } @@ -2164,8 +2150,9 @@ unicode_char(Py_UCS4 ch) assert(ch <= MAX_UNICODE); - if (ch < 256) + if (ch < 256) { return get_latin1_char(ch); + } unicode = PyUnicode_New(1, ch); if (unicode == NULL) @@ -2367,11 +2354,13 @@ _PyUnicode_FromUCS1(const Py_UCS1* u, Py_ssize_t size) PyObject *res; unsigned char max_char; - if (size == 0) + if (size == 0) { _Py_RETURN_UNICODE_EMPTY(); + } assert(size > 0); - if (size == 1) + if (size == 1) { return get_latin1_char(u[0]); + } max_char = ucs1lib_find_max_char(u, u + size); res = PyUnicode_New(size, max_char); @@ -5008,8 +4997,9 @@ unicode_decode_utf8(const char *s, Py_ssize_t size, /* ASCII is equivalent to the first 128 ordinals in Unicode. 
*/ if (size == 1 && (unsigned char)s[0] < 128) { - if (consumed) + if (consumed) { *consumed = 1; + } return get_latin1_char((unsigned char)s[0]); } @@ -7176,8 +7166,9 @@ PyUnicode_DecodeASCII(const char *s, _Py_RETURN_UNICODE_EMPTY(); /* ASCII is equivalent to the first 128 ordinals in Unicode. */ - if (size == 1 && (unsigned char)s[0] < 128) + if (size == 1 && (unsigned char)s[0] < 128) { return get_latin1_char((unsigned char)s[0]); + } // Shortcut for simple case PyObject *u = PyUnicode_New(size, 127); @@ -16234,12 +16225,11 @@ _PyUnicode_Fini(PyThreadState *tstate) Py_CLEAR(state->empty); + for (Py_ssize_t i = 0; i < 256; i++) { + Py_CLEAR(state->latin1[i]); + } + if (is_main_interp) { -#ifdef LATIN1_SINGLETONS - for (Py_ssize_t i = 0; i < 256; i++) { - Py_CLEAR(unicode_latin1[i]); - } -#endif unicode_clear_static_strings(); } From webhook-mailer at python.org Tue Jun 23 21:21:23 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Jun 2020 01:21:23 -0000 Subject: [Python-checkins] bpo-40521: Fix _PyContext_Fini() (GH-21103) Message-ID: https://github.com/python/cpython/commit/cde283d16d87024f455e45c6f1b4e4f7d8905836 commit: cde283d16d87024f455e45c6f1b4e4f7d8905836 branch: master author: Victor Stinner committer: GitHub date: 2020-06-24T03:21:15+02:00 summary: bpo-40521: Fix _PyContext_Fini() (GH-21103) Only clear _token_missing in the main interpreter. files: M Python/context.c diff --git a/Python/context.c b/Python/context.c index dc34071884750..15d8b8ea4b9b6 100644 --- a/Python/context.c +++ b/Python/context.c @@ -1302,7 +1302,9 @@ _PyContext_ClearFreeList(PyThreadState *tstate) void _PyContext_Fini(PyThreadState *tstate) { - Py_CLEAR(_token_missing); + if (_Py_IsMainInterpreter(tstate)) { + Py_CLEAR(_token_missing); + } _PyContext_ClearFreeList(tstate); #ifdef Py_DEBUG struct _Py_context_state *state = &tstate->interp->context; From webhook-mailer at python.org Tue Jun 23 23:12:21 2020 From: webhook-mailer at python.org (Gareth Rees) Date: Wed, 24 Jun 2020 03:12:21 -0000 Subject: [Python-checkins] bpo-40707: Document that Popen.communicate sets the returncode attribute (GH-20283) Message-ID: https://github.com/python/cpython/commit/bf2e515fa43406d4bd9c4c53ecc9364034d8f9f6 commit: bf2e515fa43406d4bd9c4c53ecc9364034d8f9f6 branch: master author: Gareth Rees committer: GitHub date: 2020-06-23T23:12:10-04:00 summary: bpo-40707: Document that Popen.communicate sets the returncode attribute (GH-20283) files: M Doc/library/subprocess.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index 5988bd35e72b1..e37cc980e9757 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -738,10 +738,11 @@ Instances of the :class:`Popen` class have the following methods: .. method:: Popen.communicate(input=None, timeout=None) Interact with process: Send data to stdin. Read data from stdout and stderr, - until end-of-file is reached. Wait for process to terminate. The optional - *input* argument should be data to be sent to the child process, or - ``None``, if no data should be sent to the child. If streams were opened in - text mode, *input* must be a string. Otherwise, it must be bytes. + until end-of-file is reached. Wait for process to terminate and set the + :attr:`~Popen.returncode` attribute. The optional *input* argument should be + data to be sent to the child process, or ``None``, if no data should be sent + to the child. If streams were opened in text mode, *input* must be a string. + Otherwise, it must be bytes. 
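As a quick illustration of the behavior documented above (the command and its arguments are only examples, not part of the patch):

    import subprocess

    # communicate() sends input (none here), reads stdout until EOF, waits for
    # the child to exit, and sets proc.returncode as a side effect.
    proc = subprocess.Popen(["echo", "hello"], stdout=subprocess.PIPE, text=True)
    out, err = proc.communicate()
    print(proc.returncode)   # 0 -- already set; no separate wait() call is needed
    print(out)               # 'hello\n'; err is None because stderr was not captured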
:meth:`communicate` returns a tuple ``(stdout_data, stderr_data)``. The data will be strings if streams were opened in text mode; otherwise, From webhook-mailer at python.org Tue Jun 23 23:19:44 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 03:19:44 -0000 Subject: [Python-checkins] bpo-40707: Document that Popen.communicate sets the returncode attribute (GH-20283) Message-ID: https://github.com/python/cpython/commit/4f5dde463b588fc97cacb4a1905eb422b16daa37 commit: 4f5dde463b588fc97cacb4a1905eb422b16daa37 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-23T20:19:35-07:00 summary: bpo-40707: Document that Popen.communicate sets the returncode attribute (GH-20283) (cherry picked from commit bf2e515fa43406d4bd9c4c53ecc9364034d8f9f6) Co-authored-by: Gareth Rees files: M Doc/library/subprocess.rst diff --git a/Doc/library/subprocess.rst b/Doc/library/subprocess.rst index a0152a8009179..a93a6c186f8d8 100644 --- a/Doc/library/subprocess.rst +++ b/Doc/library/subprocess.rst @@ -704,10 +704,11 @@ Instances of the :class:`Popen` class have the following methods: .. method:: Popen.communicate(input=None, timeout=None) Interact with process: Send data to stdin. Read data from stdout and stderr, - until end-of-file is reached. Wait for process to terminate. The optional - *input* argument should be data to be sent to the child process, or - ``None``, if no data should be sent to the child. If streams were opened in - text mode, *input* must be a string. Otherwise, it must be bytes. + until end-of-file is reached. Wait for process to terminate and set the + :attr:`~Popen.returncode` attribute. The optional *input* argument should be + data to be sent to the child process, or ``None``, if no data should be sent + to the child. If streams were opened in text mode, *input* must be a string. + Otherwise, it must be bytes. :meth:`communicate` returns a tuple ``(stdout_data, stderr_data)``. The data will be strings if streams were opened in text mode; otherwise, From webhook-mailer at python.org Wed Jun 24 01:46:22 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Wed, 24 Jun 2020 05:46:22 -0000 Subject: [Python-checkins] bpo-41094: Fix decoding errors with audit when open files. (GH-21095) Message-ID: https://github.com/python/cpython/commit/6c6810d98979add7a89391c3c38990d0859f7a29 commit: 6c6810d98979add7a89391c3c38990d0859f7a29 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-24T08:46:05+03:00 summary: bpo-41094: Fix decoding errors with audit when open files. 
(GH-21095) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst M Lib/test/test_embed.py M Modules/_ctypes/callproc.c M Modules/main.c M Python/fileutils.c diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index fe47289777a42..da70df7914c85 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1349,7 +1349,7 @@ def test_audit_run_file(self): returncode=1) def test_audit_run_interactivehook(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("import sys", file=f) print("sys.__interactivehook__ = lambda: None", file=f) @@ -1362,7 +1362,7 @@ def test_audit_run_interactivehook(self): os.unlink(startup) def test_audit_run_startup(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("pass", file=f) try: diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst new file mode 100644 index 0000000000000..6dd45e21d1758 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst @@ -0,0 +1,2 @@ +Fix decoding errors with audit when open files with non-ASCII names on non-UTF-8 +locale. diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index af6e1e8ce0b75..6030cc3d43670 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1415,15 +1415,12 @@ static PyObject *py_dl_open(PyObject *self, PyObject *args) if (name != Py_None) { if (PyUnicode_FSConverter(name, &name2) == 0) return NULL; - if (PyBytes_Check(name2)) - name_str = PyBytes_AS_STRING(name2); - else - name_str = PyByteArray_AS_STRING(name2); + name_str = PyBytes_AS_STRING(name2); } else { name_str = NULL; name2 = NULL; } - if (PySys_Audit("ctypes.dlopen", "s", name_str) < 0) { + if (PySys_Audit("ctypes.dlopen", "O", name) < 0) { return NULL; } handle = ctypes_dlopen(name_str, mode); diff --git a/Modules/main.c b/Modules/main.c index bc3a2ed8ed8d1..8e3b35ca5ce35 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -379,13 +379,20 @@ pymain_run_startup(PyConfig *config, PyCompilerFlags *cf, int *exitcode) if (startup == NULL) { return 0; } - if (PySys_Audit("cpython.run_startup", "s", startup) < 0) { + PyObject *startup_obj = PyUnicode_DecodeFSDefault(startup); + if (startup_obj == NULL) { return pymain_err_print(exitcode); } + if (PySys_Audit("cpython.run_startup", "O", startup_obj) < 0) { + Py_DECREF(startup_obj); + return pymain_err_print(exitcode); + } + Py_DECREF(startup_obj); FILE *fp = _Py_fopen(startup, "r"); if (fp == NULL) { int save_errno = errno; + PyErr_Clear(); PySys_WriteStderr("Could not open PYTHONSTARTUP\n"); errno = save_errno; diff --git a/Python/fileutils.c b/Python/fileutils.c index 22e72bdd5a9c7..2c86828ba989a 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1274,7 +1274,12 @@ _Py_open_impl(const char *pathname, int flags, int gil_held) #endif if (gil_held) { - if (PySys_Audit("open", "sOi", pathname, Py_None, flags) < 0) { + PyObject *pathname_obj = PyUnicode_DecodeFSDefault(pathname); + if (pathname_obj == NULL) { + return -1; + } + if (PySys_Audit("open", "OOi", pathname_obj, Py_None, flags) < 0) { + Py_DECREF(pathname_obj); return -1; } @@ -1284,12 +1289,16 @@ 
_Py_open_impl(const char *pathname, int flags, int gil_held) Py_END_ALLOW_THREADS } while (fd < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); - if (async_err) + if (async_err) { + Py_DECREF(pathname_obj); return -1; + } if (fd < 0) { - PyErr_SetFromErrnoWithFilename(PyExc_OSError, pathname); + PyErr_SetFromErrnoWithFilenameObjects(PyExc_OSError, pathname_obj, NULL); + Py_DECREF(pathname_obj); return -1; } + Py_DECREF(pathname_obj); } else { fd = open(pathname, flags); @@ -1385,9 +1394,15 @@ _Py_wfopen(const wchar_t *path, const wchar_t *mode) FILE* _Py_fopen(const char *pathname, const char *mode) { - if (PySys_Audit("open", "ssi", pathname, mode, 0) < 0) { + PyObject *pathname_obj = PyUnicode_DecodeFSDefault(pathname); + if (pathname_obj == NULL) { + return NULL; + } + if (PySys_Audit("open", "Osi", pathname_obj, mode, 0) < 0) { + Py_DECREF(pathname_obj); return NULL; } + Py_DECREF(pathname_obj); FILE *f = fopen(pathname, mode); if (f == NULL) From webhook-mailer at python.org Wed Jun 24 07:14:15 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 11:14:15 -0000 Subject: [Python-checkins] Fix typo in dataclasses module (GH-21109) (#21111) Message-ID: https://github.com/python/cpython/commit/0029099decbf0272cea837b029662bee1ee3e4d4 commit: 0029099decbf0272cea837b029662bee1ee3e4d4 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T07:14:10-04:00 summary: Fix typo in dataclasses module (GH-21109) (#21111) Automerge-Triggered-By: @matrixise (cherry picked from commit 80526f68411a9406a9067095fbf6a0f88047cac5) Co-authored-by: Jürgen Gmach Co-authored-by: Jürgen Gmach files: M Lib/dataclasses.py diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index 74f79294e81bd..10bb33e3746a8 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -1092,7 +1092,7 @@ def _asdict_inner(obj, dict_factory): # method, because: # - it does not recurse in to the namedtuple fields and # convert them to dicts (using dict_factory). - # - I don't actually want to return a dict here. The the main + # - I don't actually want to return a dict here. The main # use case here is json.dumps, and it handles converting
Admittedly we're losing some # information here when we produce a json list instead of a From webhook-mailer at python.org Wed Jun 24 08:58:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 12:58:46 -0000 Subject: [Python-checkins] bpo-41005: Fixed perrmission error (GH-20936) (GH-21052) Message-ID: https://github.com/python/cpython/commit/adf8708c44945bfa68db8b02c6a1f560f05a5151 commit: adf8708c44945bfa68db8b02c6a1f560f05a5151 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T14:58:27+02:00 summary: bpo-41005: Fixed perrmission error (GH-20936) (GH-21052) * fixed issue 41005: webbrowser fails when xdg-settings cannot be executed Co-authored-by: KrishnaSai2020 Co-authored-by: blurb-it[bot] <43283697+blurb-it[bot]@users.noreply.github.com> (cherry picked from commit 9e27bc0c1efc7478872f98729f87886e9333548f) Co-authored-by: Krishna Chivukula <63070026+KrishnaSai2020 at users.noreply.github.com> Co-authored-by: Krishna Chivukula <63070026+KrishnaSai2020 at users.noreply.github.com> files: A Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst M Lib/webbrowser.py diff --git a/Lib/webbrowser.py b/Lib/webbrowser.py index 3dcf66b659825..53e0efc967a0e 100755 --- a/Lib/webbrowser.py +++ b/Lib/webbrowser.py @@ -550,7 +550,7 @@ def register_standard_browsers(): cmd = "xdg-settings get default-web-browser".split() raw_result = subprocess.check_output(cmd, stderr=subprocess.DEVNULL) result = raw_result.decode().strip() - except (FileNotFoundError, subprocess.CalledProcessError): + except (FileNotFoundError, subprocess.CalledProcessError, PermissionError) : pass else: global _os_preferred_browser diff --git a/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst b/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst new file mode 100644 index 0000000000000..3b5f3f23a12f5 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst @@ -0,0 +1 @@ +fixed an XDG settings issue not allowing macos to open browser in webbrowser.py \ No newline at end of file From webhook-mailer at python.org Wed Jun 24 09:22:06 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Wed, 24 Jun 2020 13:22:06 -0000 Subject: [Python-checkins] bpo-40521: Always create the empty tuple singleton (GH-21116) Message-ID: https://github.com/python/cpython/commit/0430dfac629b4eb0e899a09b899a494aa92145f6 commit: 0430dfac629b4eb0e899a09b899a494aa92145f6 branch: master author: Victor Stinner committer: GitHub date: 2020-06-24T15:21:54+02:00 summary: bpo-40521: Always create the empty tuple singleton (GH-21116) Py_InitializeFromConfig() now always creates the empty tuple singleton as soon as possible. Optimize PyTuple_New(0): it no longer has to check if the empty tuple was created or not, it is always creatd. * Add tuple_create_empty_tuple_singleton() function. * Add tuple_get_empty() function. * Remove state parameter of tuple_alloc(). 
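A small illustration of the invariant behind this change (observed CPython behavior, not a language guarantee): within one interpreter, every way of building an empty tuple returns the same shared object.

    >>> a = ()
    >>> b = tuple([])
    >>> a is b            # both refer to the per-interpreter empty tuple singleton
    True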
files: M Include/internal/pycore_pylifecycle.h M Objects/tupleobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index f29c7cb9f392c..3b2173787118f 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -34,6 +34,7 @@ PyAPI_FUNC(int) _Py_IsLocaleCoercionTarget(const char *ctype_loc); extern PyStatus _PyUnicode_Init(PyThreadState *tstate); extern int _PyStructSequence_Init(void); extern int _PyLong_Init(PyThreadState *tstate); +extern PyStatus _PyTuple_Init(PyThreadState *tstate); extern PyStatus _PyFaulthandler_Init(int enable); extern int _PyTraceMalloc_Init(int enable); extern PyObject * _PyBuiltin_Init(PyThreadState *tstate); diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c index f4f9aa259e8b2..41677d7e710aa 100644 --- a/Objects/tupleobject.c +++ b/Objects/tupleobject.c @@ -2,10 +2,10 @@ /* Tuple object implementation */ #include "Python.h" -#include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_accu.h" -#include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() -#include "pycore_object.h" +#include "pycore_abstract.h" // _PyIndex_Check() +#include "pycore_gc.h" // _PyObject_GC_IS_TRACKED() +#include "pycore_initconfig.h" // _PyStatus_OK() +#include "pycore_object.h" // _PyObject_GC_TRACK() /*[clinic input] class tuple "PyTupleObject *" "&PyTuple_Type" @@ -15,12 +15,14 @@ class tuple "PyTupleObject *" "&PyTuple_Type" #include "clinic/tupleobject.c.h" +#if PyTuple_MAXSAVESIZE > 0 static struct _Py_tuple_state * get_tuple_state(void) { PyInterpreterState *interp = _PyInterpreterState_GET(); return &interp->tuple; } +#endif static inline void @@ -55,14 +57,21 @@ _PyTuple_DebugMallocStats(FILE *out) which wraps this function). */ static PyTupleObject * -tuple_alloc(struct _Py_tuple_state *state, Py_ssize_t size) +tuple_alloc(Py_ssize_t size) { PyTupleObject *op; +#if PyTuple_MAXSAVESIZE > 0 + // If Python is built with the empty tuple singleton, + // tuple_alloc(0) must not be called. + assert(size != 0); +#endif if (size < 0) { PyErr_BadInternalCall(); return NULL; } + #if PyTuple_MAXSAVESIZE > 0 + struct _Py_tuple_state *state = get_tuple_state(); #ifdef Py_DEBUG // tuple_alloc() must not be called after _PyTuple_Fini() assert(state->numfree[0] != -1); @@ -93,36 +102,65 @@ tuple_alloc(struct _Py_tuple_state *state, Py_ssize_t size) return op; } +static int +tuple_create_empty_tuple_singleton(struct _Py_tuple_state *state) +{ +#if PyTuple_MAXSAVESIZE > 0 + assert(state->free_list[0] == NULL); + + PyTupleObject *op = PyObject_GC_NewVar(PyTupleObject, &PyTuple_Type, 0); + if (op == NULL) { + return -1; + } + // The empty tuple singleton is not tracked by the GC. + // It does not contain any Python object. 
+ + state->free_list[0] = op; + state->numfree[0]++; + + assert(state->numfree[0] == 1); +#endif + return 0; +} + + +static PyObject * +tuple_get_empty(void) +{ +#if PyTuple_MAXSAVESIZE > 0 + struct _Py_tuple_state *state = get_tuple_state(); + PyTupleObject *op = state->free_list[0]; + // tuple_get_empty() must not be called before _PyTuple_Init() + // or after _PyTuple_Fini() + assert(op != NULL); +#ifdef Py_DEBUG + assert(state->numfree[0] != -1); +#endif + + Py_INCREF(op); + return (PyObject *) op; +#else + return PyTuple_New(0); +#endif +} + + PyObject * PyTuple_New(Py_ssize_t size) { PyTupleObject *op; #if PyTuple_MAXSAVESIZE > 0 - struct _Py_tuple_state *state = get_tuple_state(); - if (size == 0 && state->free_list[0]) { - op = state->free_list[0]; - Py_INCREF(op); - return (PyObject *) op; + if (size == 0) { + return tuple_get_empty(); } #endif - op = tuple_alloc(state, size); + op = tuple_alloc(size); if (op == NULL) { return NULL; } for (Py_ssize_t i = 0; i < size; i++) { op->ob_item[i] = NULL; } -#if PyTuple_MAXSAVESIZE > 0 - if (size == 0) { -#ifdef Py_DEBUG - // PyTuple_New() must not be called after _PyTuple_Fini() - assert(state->numfree[0] != -1); -#endif - state->free_list[0] = op; - ++state->numfree[0]; - Py_INCREF(op); /* extra INCREF so that this is never freed */ - } -#endif tuple_gc_track(op); return (PyObject *) op; } @@ -203,13 +241,11 @@ PyTuple_Pack(Py_ssize_t n, ...) va_list vargs; if (n == 0) { - return PyTuple_New(0); + return tuple_get_empty(); } - struct _Py_tuple_state *state = get_tuple_state(); - va_start(vargs, n); - PyTupleObject *result = tuple_alloc(state, n); + PyTupleObject *result = tuple_alloc(n); if (result == NULL) { va_end(vargs); return NULL; @@ -245,9 +281,9 @@ tupledealloc(PyTupleObject *op) // tupledealloc() must not be called after _PyTuple_Fini() assert(state->numfree[0] != -1); #endif - if (len < PyTuple_MAXSAVESIZE && - state->numfree[len] < PyTuple_MAXFREELIST && - Py_IS_TYPE(op, &PyTuple_Type)) + if (len < PyTuple_MAXSAVESIZE + && state->numfree[len] < PyTuple_MAXFREELIST + && Py_IS_TYPE(op, &PyTuple_Type)) { op->ob_item[0] = (PyObject *) state->free_list[len]; state->numfree[len]++; @@ -257,6 +293,7 @@ tupledealloc(PyTupleObject *op) #endif } Py_TYPE(op)->tp_free((PyObject *)op); + #if PyTuple_MAXSAVESIZE > 0 done: #endif @@ -423,11 +460,10 @@ PyObject * _PyTuple_FromArray(PyObject *const *src, Py_ssize_t n) { if (n == 0) { - return PyTuple_New(0); + return tuple_get_empty(); } - struct _Py_tuple_state *state = get_tuple_state(); - PyTupleObject *tuple = tuple_alloc(state, n); + PyTupleObject *tuple = tuple_alloc(n); if (tuple == NULL) { return NULL; } @@ -494,11 +530,10 @@ tupleconcat(PyTupleObject *a, PyObject *bb) assert((size_t)Py_SIZE(a) + (size_t)Py_SIZE(b) < PY_SSIZE_T_MAX); size = Py_SIZE(a) + Py_SIZE(b); if (size == 0) { - return PyTuple_New(0); + return tuple_get_empty(); } - struct _Py_tuple_state *state = get_tuple_state(); - np = tuple_alloc(state, size); + np = tuple_alloc(size); if (np == NULL) { return NULL; } @@ -536,13 +571,12 @@ tuplerepeat(PyTupleObject *a, Py_ssize_t n) } } if (Py_SIZE(a) == 0 || n <= 0) { - return PyTuple_New(0); + return tuple_get_empty(); } if (n > PY_SSIZE_T_MAX / Py_SIZE(a)) return PyErr_NoMemory(); size = Py_SIZE(a) * n; - struct _Py_tuple_state *state = get_tuple_state(); - np = tuple_alloc(state, size); + np = tuple_alloc(size); if (np == NULL) return NULL; p = np->ob_item; @@ -713,10 +747,12 @@ tuple_new_impl(PyTypeObject *type, PyObject *iterable) if (type != &PyTuple_Type) return 
tuple_subtype_new(type, iterable); - if (iterable == NULL) - return PyTuple_New(0); - else + if (iterable == NULL) { + return tuple_get_empty(); + } + else { return PySequence_Tuple(iterable); + } } static PyObject * @@ -735,7 +771,9 @@ tuple_vectorcall(PyObject *type, PyObject * const*args, if (nargs) { return tuple_new_impl((PyTypeObject *)type, args[0]); } - return PyTuple_New(0); + else { + return tuple_get_empty(); + } } static PyObject * @@ -798,7 +836,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) &stop, step); if (slicelength <= 0) { - return PyTuple_New(0); + return tuple_get_empty(); } else if (start == 0 && step == 1 && slicelength == PyTuple_GET_SIZE(self) && @@ -807,8 +845,7 @@ tuplesubscript(PyTupleObject* self, PyObject* item) return (PyObject *)self; } else { - struct _Py_tuple_state *state = get_tuple_state(); - PyTupleObject* result = tuple_alloc(state, slicelength); + PyTupleObject* result = tuple_alloc(slicelength); if (!result) return NULL; src = self->ob_item; @@ -988,15 +1025,26 @@ _PyTuple_ClearFreeList(PyThreadState *tstate) #endif } + +PyStatus +_PyTuple_Init(PyThreadState *tstate) +{ + struct _Py_tuple_state *state = &tstate->interp->tuple; + if (tuple_create_empty_tuple_singleton(state) < 0) { + return _PyStatus_NO_MEMORY(); + } + return _PyStatus_OK(); +} + + void _PyTuple_Fini(PyThreadState *tstate) { #if PyTuple_MAXSAVESIZE > 0 struct _Py_tuple_state *state = &tstate->interp->tuple; - /* empty tuples are used all over the place and applications may - * rely on the fact that an empty tuple is a singleton. */ + // The empty tuple singleton must not be tracked by the GC + assert(!_PyObject_GC_IS_TRACKED(state->free_list[0])); Py_CLEAR(state->free_list[0]); - _PyTuple_ClearFreeList(tstate); #ifdef Py_DEBUG state->numfree[0] = -1; diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index eda4c6ad7e474..4b658f847bc12 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -583,6 +583,14 @@ pycore_init_types(PyThreadState *tstate) return status; } + // Create the empty tuple singleton. It must be created before the first + // PyType_Ready() call since PyType_Ready() creates tuples, for tp_bases + // for example. + status = _PyTuple_Init(tstate); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + if (is_main_interp) { status = _PyTypes_Init(); if (_PyStatus_EXCEPTION(status)) { @@ -590,7 +598,6 @@ pycore_init_types(PyThreadState *tstate) } } - if (!_PyLong_Init(tstate)) { return _PyStatus_ERR("can't init longs"); } From webhook-mailer at python.org Wed Jun 24 12:27:55 2020 From: webhook-mailer at python.org (Nikita Nemkin) Date: Wed, 24 Jun 2020 16:27:55 -0000 Subject: [Python-checkins] bpo-41038: Fix non-ASCII string corruption in Win32 resource files (GH-20985) Message-ID: https://github.com/python/cpython/commit/33b79b11b891adea5a916df8e3779505b37aabe7 commit: 33b79b11b891adea5a916df8e3779505b37aabe7 branch: master author: Nikita Nemkin committer: GitHub date: 2020-06-24T17:27:42+01:00 summary: bpo-41038: Fix non-ASCII string corruption in Win32 resource files (GH-20985) In absence of explicit declaration, resource compiler uses system codepage. When this codepage is DBCS or UTF-8, Python's copyright string is corrupted, because it contains copyright sign encoded as \xA9. The fix is to explicitly declare codepage 1252. 
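For context on the failure mode described above: the copyright sign is stored as the single byte \xA9, which is a complete character in codepage 1252 but not a valid start byte in UTF-8. A rough Python sketch of the same bytes (the string is illustrative):

    data = b"Copyright \xa9 2020"
    print(data.decode("cp1252"))                   # '\xa9' decodes to the copyright sign U+00A9
    print(data.decode("utf-8", errors="replace"))  # invalid in UTF-8, so it becomes U+FFFD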
files: M PC/python_ver_rc.h diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index c318d4487ea1b..060aecdc675cb 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -1,6 +1,7 @@ // Resource script for Python core DLL. // Currently only holds version information. // +#pragma code_page(1252) #include "winver.h" #define PYTHON_COMPANY "Python Software Foundation" From webhook-mailer at python.org Wed Jun 24 12:43:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 16:43:00 -0000 Subject: [Python-checkins] bpo-41038: Fix non-ASCII string corruption in Win32 resource files (GH-20985) Message-ID: https://github.com/python/cpython/commit/fb4a6241054ad6b7f24d1b32af6827e02936d568 commit: fb4a6241054ad6b7f24d1b32af6827e02936d568 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T09:42:55-07:00 summary: bpo-41038: Fix non-ASCII string corruption in Win32 resource files (GH-20985) In absence of explicit declaration, resource compiler uses system codepage. When this codepage is DBCS or UTF-8, Python's copyright string is corrupted, because it contains copyright sign encoded as \xA9. The fix is to explicitly declare codepage 1252. (cherry picked from commit 33b79b11b891adea5a916df8e3779505b37aabe7) Co-authored-by: Nikita Nemkin files: M PC/python_ver_rc.h diff --git a/PC/python_ver_rc.h b/PC/python_ver_rc.h index f95e755bb8bd7..d725a9ba06ebd 100644 --- a/PC/python_ver_rc.h +++ b/PC/python_ver_rc.h @@ -1,6 +1,7 @@ // Resource script for Python core DLL. // Currently only holds version information. // +#pragma code_page(1252) #include "winver.h" #define PYTHON_COMPANY "Python Software Foundation" From webhook-mailer at python.org Wed Jun 24 12:45:43 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 16:45:43 -0000 Subject: [Python-checkins] bpo-41094: Fix decoding errors with audit when open files. (GH-21095) Message-ID: https://github.com/python/cpython/commit/1813d318fd4e517042415fa4f59fe8668c17a235 commit: 1813d318fd4e517042415fa4f59fe8668c17a235 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T09:45:38-07:00 summary: bpo-41094: Fix decoding errors with audit when open files. 
(GH-21095) (cherry picked from commit 6c6810d98979add7a89391c3c38990d0859f7a29) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst M Lib/test/test_embed.py M Modules/_ctypes/callproc.c M Modules/main.c M Python/fileutils.c diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index da79d7af05023..886ccc5a125a9 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1309,7 +1309,7 @@ def test_audit_run_file(self): self.run_embedded_interpreter("test_audit_run_file", timeout=3, returncode=1) def test_audit_run_interactivehook(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("import sys", file=f) print("sys.__interactivehook__ = lambda: None", file=f) @@ -1321,7 +1321,7 @@ def test_audit_run_interactivehook(self): os.unlink(startup) def test_audit_run_startup(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("pass", file=f) try: diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst new file mode 100644 index 0000000000000..6dd45e21d1758 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst @@ -0,0 +1,2 @@ +Fix decoding errors with audit when open files with non-ASCII names on non-UTF-8 +locale. diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c index 55fc226ca1258..a9b8675cd951b 100644 --- a/Modules/_ctypes/callproc.c +++ b/Modules/_ctypes/callproc.c @@ -1416,15 +1416,12 @@ static PyObject *py_dl_open(PyObject *self, PyObject *args) if (name != Py_None) { if (PyUnicode_FSConverter(name, &name2) == 0) return NULL; - if (PyBytes_Check(name2)) - name_str = PyBytes_AS_STRING(name2); - else - name_str = PyByteArray_AS_STRING(name2); + name_str = PyBytes_AS_STRING(name2); } else { name_str = NULL; name2 = NULL; } - if (PySys_Audit("ctypes.dlopen", "s", name_str) < 0) { + if (PySys_Audit("ctypes.dlopen", "O", name) < 0) { return NULL; } handle = ctypes_dlopen(name_str, mode); diff --git a/Modules/main.c b/Modules/main.c index 2a360b58efa83..788bc119095c0 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -391,13 +391,20 @@ pymain_run_startup(PyConfig *config, PyCompilerFlags *cf, int *exitcode) if (startup == NULL) { return 0; } - if (PySys_Audit("cpython.run_startup", "s", startup) < 0) { + PyObject *startup_obj = PyUnicode_DecodeFSDefault(startup); + if (startup_obj == NULL) { return pymain_err_print(exitcode); } + if (PySys_Audit("cpython.run_startup", "O", startup_obj) < 0) { + Py_DECREF(startup_obj); + return pymain_err_print(exitcode); + } + Py_DECREF(startup_obj); FILE *fp = _Py_fopen(startup, "r"); if (fp == NULL) { int save_errno = errno; + PyErr_Clear(); PySys_WriteStderr("Could not open PYTHONSTARTUP\n"); errno = save_errno; diff --git a/Python/fileutils.c b/Python/fileutils.c index 1021ddb58853e..b274116745efe 100644 --- a/Python/fileutils.c +++ b/Python/fileutils.c @@ -1274,7 +1274,12 @@ _Py_open_impl(const char *pathname, int flags, int gil_held) #endif if (gil_held) { - if (PySys_Audit("open", "sOi", pathname, Py_None, flags) < 0) { + PyObject *pathname_obj = PyUnicode_DecodeFSDefault(pathname); + if (pathname_obj 
== NULL) { + return -1; + } + if (PySys_Audit("open", "OOi", pathname_obj, Py_None, flags) < 0) { + Py_DECREF(pathname_obj); return -1; } @@ -1284,12 +1289,16 @@ _Py_open_impl(const char *pathname, int flags, int gil_held) Py_END_ALLOW_THREADS } while (fd < 0 && errno == EINTR && !(async_err = PyErr_CheckSignals())); - if (async_err) + if (async_err) { + Py_DECREF(pathname_obj); return -1; + } if (fd < 0) { - PyErr_SetFromErrnoWithFilename(PyExc_OSError, pathname); + PyErr_SetFromErrnoWithFilenameObjects(PyExc_OSError, pathname_obj, NULL); + Py_DECREF(pathname_obj); return -1; } + Py_DECREF(pathname_obj); } else { fd = open(pathname, flags); @@ -1385,9 +1394,15 @@ _Py_wfopen(const wchar_t *path, const wchar_t *mode) FILE* _Py_fopen(const char *pathname, const char *mode) { - if (PySys_Audit("open", "ssi", pathname, mode, 0) < 0) { + PyObject *pathname_obj = PyUnicode_DecodeFSDefault(pathname); + if (pathname_obj == NULL) { + return NULL; + } + if (PySys_Audit("open", "Osi", pathname_obj, mode, 0) < 0) { + Py_DECREF(pathname_obj); return NULL; } + Py_DECREF(pathname_obj); FILE *f = fopen(pathname, mode); if (f == NULL) From webhook-mailer at python.org Wed Jun 24 12:46:38 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Wed, 24 Jun 2020 16:46:38 -0000 Subject: [Python-checkins] bpo-41094: Additional fix for PYTHONSTARTUP. (GH-21119) Message-ID: https://github.com/python/cpython/commit/a7dc71470156680f1fd5243290c6d377824b7ef4 commit: a7dc71470156680f1fd5243290c6d377824b7ef4 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-24T19:46:30+03:00 summary: bpo-41094: Additional fix for PYTHONSTARTUP. (GH-21119) files: M Lib/test/test_embed.py M Modules/main.c diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index da70df7914c85..e740fe8952999 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1349,7 +1349,7 @@ def test_audit_run_file(self): returncode=1) def test_audit_run_interactivehook(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("import sys", file=f) print("sys.__interactivehook__ = lambda: None", file=f) @@ -1362,7 +1362,7 @@ def test_audit_run_interactivehook(self): os.unlink(startup) def test_audit_run_startup(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("pass", file=f) try: diff --git a/Modules/main.c b/Modules/main.c index 8e3b35ca5ce35..4a76f4461bf61 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -375,36 +375,70 @@ pymain_run_file(const PyConfig *config, PyCompilerFlags *cf) static int pymain_run_startup(PyConfig *config, PyCompilerFlags *cf, int *exitcode) { + int ret; + PyObject *startup_obj = NULL; + if (!config->use_environment) { + return 0; + } +#ifdef MS_WINDOWS + const wchar_t *wstartup = _wgetenv(L"PYTHONSTARTUP"); + if (wstartup == NULL || wstartup[0] == L'\0') { + return 0; + } + PyObject *startup_bytes = NULL; + startup_obj = PyUnicode_FromWideChar(wstartup, wcslen(wstartup)); + if (startup_obj == NULL) { + goto error; + } + startup_bytes = PyUnicode_EncodeFSDefault(startup_obj); + if (startup_bytes == NULL) { + goto error; + } + const char *startup = PyBytes_AS_STRING(startup_bytes); +#else const char *startup = 
_Py_GetEnv(config->use_environment, "PYTHONSTARTUP"); if (startup == NULL) { return 0; } - PyObject *startup_obj = PyUnicode_DecodeFSDefault(startup); + startup_obj = PyUnicode_DecodeFSDefault(startup); if (startup_obj == NULL) { - return pymain_err_print(exitcode); + goto error; } +#endif if (PySys_Audit("cpython.run_startup", "O", startup_obj) < 0) { - Py_DECREF(startup_obj); - return pymain_err_print(exitcode); + goto error; } - Py_DECREF(startup_obj); +#ifdef MS_WINDOWS + FILE *fp = _Py_wfopen(wstartup, L"r"); +#else FILE *fp = _Py_fopen(startup, "r"); +#endif if (fp == NULL) { int save_errno = errno; PyErr_Clear(); PySys_WriteStderr("Could not open PYTHONSTARTUP\n"); errno = save_errno; - PyErr_SetFromErrnoWithFilename(PyExc_OSError, startup); - - return pymain_err_print(exitcode); + PyErr_SetFromErrnoWithFilenameObjects(PyExc_OSError, startup_obj, NULL); + goto error; } (void) PyRun_SimpleFileExFlags(fp, startup, 0, cf); PyErr_Clear(); fclose(fp); - return 0; + ret = 0; + +done: +#ifdef MS_WINDOWS + Py_XDECREF(startup_bytes); +#endif + Py_XDECREF(startup_obj); + return ret; + +error: + ret = pymain_err_print(exitcode); + goto done; } From webhook-mailer at python.org Wed Jun 24 13:03:57 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 17:03:57 -0000 Subject: [Python-checkins] bpo-41094: Additional fix for PYTHONSTARTUP. (GH-21119) Message-ID: https://github.com/python/cpython/commit/1dda40c1d2681a8f03a567b72698d88ced6bbd6c commit: 1dda40c1d2681a8f03a567b72698d88ced6bbd6c branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T10:03:51-07:00 summary: bpo-41094: Additional fix for PYTHONSTARTUP. (GH-21119) (cherry picked from commit a7dc71470156680f1fd5243290c6d377824b7ef4) Co-authored-by: Serhiy Storchaka files: M Lib/test/test_embed.py M Modules/main.c diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index 886ccc5a125a9..ec2b416da368d 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1309,7 +1309,7 @@ def test_audit_run_file(self): self.run_embedded_interpreter("test_audit_run_file", timeout=3, returncode=1) def test_audit_run_interactivehook(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("import sys", file=f) print("sys.__interactivehook__ = lambda: None", file=f) @@ -1321,7 +1321,7 @@ def test_audit_run_interactivehook(self): os.unlink(startup) def test_audit_run_startup(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.TESTFN or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" with open(startup, "w", encoding="utf-8") as f: print("pass", file=f) try: diff --git a/Modules/main.c b/Modules/main.c index 788bc119095c0..70be4cfacbf82 100644 --- a/Modules/main.c +++ b/Modules/main.c @@ -387,36 +387,70 @@ pymain_run_file(PyConfig *config, PyCompilerFlags *cf) static int pymain_run_startup(PyConfig *config, PyCompilerFlags *cf, int *exitcode) { + int ret; + PyObject *startup_obj = NULL; + if (!config->use_environment) { + return 0; + } +#ifdef MS_WINDOWS + const wchar_t *wstartup = _wgetenv(L"PYTHONSTARTUP"); + if (wstartup == NULL || wstartup[0] == L'\0') { + return 0; + } + PyObject *startup_bytes = NULL; + startup_obj = PyUnicode_FromWideChar(wstartup, wcslen(wstartup)); + if 
(startup_obj == NULL) { + goto error; + } + startup_bytes = PyUnicode_EncodeFSDefault(startup_obj); + if (startup_bytes == NULL) { + goto error; + } + const char *startup = PyBytes_AS_STRING(startup_bytes); +#else const char *startup = _Py_GetEnv(config->use_environment, "PYTHONSTARTUP"); if (startup == NULL) { return 0; } - PyObject *startup_obj = PyUnicode_DecodeFSDefault(startup); + startup_obj = PyUnicode_DecodeFSDefault(startup); if (startup_obj == NULL) { - return pymain_err_print(exitcode); + goto error; } +#endif if (PySys_Audit("cpython.run_startup", "O", startup_obj) < 0) { - Py_DECREF(startup_obj); - return pymain_err_print(exitcode); + goto error; } - Py_DECREF(startup_obj); +#ifdef MS_WINDOWS + FILE *fp = _Py_wfopen(wstartup, L"r"); +#else FILE *fp = _Py_fopen(startup, "r"); +#endif if (fp == NULL) { int save_errno = errno; PyErr_Clear(); PySys_WriteStderr("Could not open PYTHONSTARTUP\n"); errno = save_errno; - PyErr_SetFromErrnoWithFilename(PyExc_OSError, startup); - - return pymain_err_print(exitcode); + PyErr_SetFromErrnoWithFilenameObjects(PyExc_OSError, startup_obj, NULL); + goto error; } (void) PyRun_SimpleFileExFlags(fp, startup, 0, cf); PyErr_Clear(); fclose(fp); - return 0; + ret = 0; + +done: +#ifdef MS_WINDOWS + Py_XDECREF(startup_bytes); +#endif + Py_XDECREF(startup_obj); + return ret; + +error: + ret = pymain_err_print(exitcode); + goto done; } From webhook-mailer at python.org Wed Jun 24 17:50:57 2020 From: webhook-mailer at python.org (Arisaka97) Date: Wed, 24 Jun 2020 21:50:57 -0000 Subject: [Python-checkins] bpo-40773: Fix rendering for 'retval' on the pdb page (GH-21081) Message-ID: https://github.com/python/cpython/commit/cf18c9e9d4d44f6671a3fe6011bb53d8ee9bd92b commit: cf18c9e9d4d44f6671a3fe6011bb53d8ee9bd92b branch: master author: Arisaka97 committer: GitHub date: 2020-06-24T14:50:49-07:00 summary: bpo-40773: Fix rendering for 'retval' on the pdb page (GH-21081) Automerge-Triggered-By: @merwok files: M Doc/library/pdb.rst diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 606e8e5345770..ed1e9712c0e3d 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -538,6 +538,7 @@ by the local file. executed in the current environment). .. pdbcommand:: retval + Print the return value for the last return of a function. .. rubric:: Footnotes From webhook-mailer at python.org Wed Jun 24 18:03:21 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Wed, 24 Jun 2020 22:03:21 -0000 Subject: [Python-checkins] bpo-40773: Fix rendering for 'retval' on the pdb page (GH-21081) Message-ID: https://github.com/python/cpython/commit/ec05a7feed120503a3ad2dd4547eea2aa9a81dae commit: ec05a7feed120503a3ad2dd4547eea2aa9a81dae branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-24T15:03:11-07:00 summary: bpo-40773: Fix rendering for 'retval' on the pdb page (GH-21081) Automerge-Triggered-By: @merwok (cherry picked from commit cf18c9e9d4d44f6671a3fe6011bb53d8ee9bd92b) Co-authored-by: Arisaka97 files: M Doc/library/pdb.rst diff --git a/Doc/library/pdb.rst b/Doc/library/pdb.rst index 5ee7faaa5c9f8..ca0a507a0e64e 100644 --- a/Doc/library/pdb.rst +++ b/Doc/library/pdb.rst @@ -538,6 +538,7 @@ by the local file. executed in the current environment). .. pdbcommand:: retval + Print the return value for the last return of a function. .. 
rubric:: Footnotes From webhook-mailer at python.org Thu Jun 25 02:30:30 2020 From: webhook-mailer at python.org (Bruce Merry) Date: Thu, 25 Jun 2020 06:30:30 -0000 Subject: [Python-checkins] bpo-41002: Optimize HTTPResponse.read with a given amount (GH-20943) Message-ID: https://github.com/python/cpython/commit/152f0b8beea12e6282d284100b600771b968927a commit: 152f0b8beea12e6282d284100b600771b968927a branch: master author: Bruce Merry committer: GitHub date: 2020-06-24T23:30:21-07:00 summary: bpo-41002: Optimize HTTPResponse.read with a given amount (GH-20943) I've done the implementation for both non-chunked and chunked reads. I haven't benchmarked chunked reads because I don't currently have a convenient way to generate a high-bandwidth chunked stream, but I don't see any reason that it shouldn't enjoy the same benefits that the non-chunked case does. I've used the benchmark attached to the bpo bug to verify that performance now matches the unsized read case. Automerge-Triggered-By: @methane files: A Misc/NEWS.d/next/Library/2020-06-17-17-26-24.bpo-41002.NPBItE.rst M Lib/http/client.py M Lib/test/test_httplib.py diff --git a/Lib/http/client.py b/Lib/http/client.py index 019380a720318..500230d5d514f 100644 --- a/Lib/http/client.py +++ b/Lib/http/client.py @@ -448,18 +448,25 @@ def read(self, amt=None): self._close_conn() return b"" + if self.chunked: + return self._read_chunked(amt) + if amt is not None: - # Amount is given, implement using readinto - b = bytearray(amt) - n = self.readinto(b) - return memoryview(b)[:n].tobytes() + if self.length is not None and amt > self.length: + # clip the read to the "end of response" + amt = self.length + s = self.fp.read(amt) + if not s and amt: + # Ideally, we would raise IncompleteRead if the content-length + # wasn't satisfied, but it might break compatibility. 
+ self._close_conn() + elif self.length is not None: + self.length -= len(s) + if not self.length: + self._close_conn() + return s else: # Amount is not given (unbounded read) so we must check self.length - # and self.chunked - - if self.chunked: - return self._readall_chunked() - if self.length is None: s = self.fp.read() else: @@ -560,7 +567,7 @@ def _get_chunk_left(self): self.chunk_left = chunk_left return chunk_left - def _readall_chunked(self): + def _read_chunked(self, amt=None): assert self.chunked != _UNKNOWN value = [] try: @@ -568,7 +575,15 @@ def _readall_chunked(self): chunk_left = self._get_chunk_left() if chunk_left is None: break + + if amt is not None and amt <= chunk_left: + value.append(self._safe_read(amt)) + self.chunk_left = chunk_left - amt + break + value.append(self._safe_read(chunk_left)) + if amt is not None: + amt -= chunk_left self.chunk_left = 0 return b''.join(value) except IncompleteRead: diff --git a/Lib/test/test_httplib.py b/Lib/test/test_httplib.py index e95487bcd45db..e909980d23aac 100644 --- a/Lib/test/test_httplib.py +++ b/Lib/test/test_httplib.py @@ -569,6 +569,33 @@ def test_partial_readintos(self): resp.close() self.assertTrue(resp.closed) + def test_partial_reads_past_end(self): + # if we have Content-Length, clip reads to the end + body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText" + sock = FakeSocket(body) + resp = client.HTTPResponse(sock) + resp.begin() + self.assertEqual(resp.read(10), b'Text') + self.assertTrue(resp.isclosed()) + self.assertFalse(resp.closed) + resp.close() + self.assertTrue(resp.closed) + + def test_partial_readintos_past_end(self): + # if we have Content-Length, clip readintos to the end + body = "HTTP/1.1 200 Ok\r\nContent-Length: 4\r\n\r\nText" + sock = FakeSocket(body) + resp = client.HTTPResponse(sock) + resp.begin() + b = bytearray(10) + n = resp.readinto(b) + self.assertEqual(n, 4) + self.assertEqual(bytes(b)[:4], b'Text') + self.assertTrue(resp.isclosed()) + self.assertFalse(resp.closed) + resp.close() + self.assertTrue(resp.closed) + def test_partial_reads_no_content_length(self): # when no length is present, the socket should be gracefully closed when # all data was read diff --git a/Misc/NEWS.d/next/Library/2020-06-17-17-26-24.bpo-41002.NPBItE.rst b/Misc/NEWS.d/next/Library/2020-06-17-17-26-24.bpo-41002.NPBItE.rst new file mode 100644 index 0000000000000..c3eebb7b9aed7 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-17-17-26-24.bpo-41002.NPBItE.rst @@ -0,0 +1 @@ +Improve performance of HTTPResponse.read with a given amount. Patch by Bruce Merry. From webhook-mailer at python.org Thu Jun 25 03:48:11 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 07:48:11 -0000 Subject: [Python-checkins] Fix macOS installer build typos Message-ID: https://github.com/python/cpython/commit/e41eced0fc7212c0739538292844ff2b8c62bc03 commit: e41eced0fc7212c0739538292844ff2b8c62bc03 branch: 3.7 author: Ned Deily committer: Ned Deily date: 2020-06-25T03:47:27-04:00 summary: Fix macOS installer build typos files: M Mac/BuildScript/README.rst M Mac/BuildScript/build-installer.py diff --git a/Mac/BuildScript/README.rst b/Mac/BuildScript/README.rst index 2a7a9c00ee470..94a6bb28cadfa 100644 --- a/Mac/BuildScript/README.rst +++ b/Mac/BuildScript/README.rst @@ -30,7 +30,7 @@ building on a newer version of macOS that will run on older versions by setting MACOSX_DEPLOYMENT_TARGET. 
This is because the various Python C modules do not yet support runtime testing of macOS feature availability (for example, by using macOS AvailabilityMacros.h -and weak-linking). To build a Python that is used to be used on a +and weak-linking). To build a Python that is to be used on a range of macOS releases, always build on the oldest release to be supported; the necessary shared libraries for that release will normally also be available on later systems, with the occasional @@ -40,7 +40,7 @@ build-installer requires Apple Developer tools, either from the Command Line Tools package or from a full Xcode installation. You should use the most recent version of either for the operating system version in use. (One notable exception: on macOS 10.6, -Snow Leopards, use Xcode 3, not Xcode 4 which was released later +Snow Leopard, use Xcode 3, not Xcode 4 which was released later in the 10.6 support cycle.) 1. 64-bit, x86_64, for OS X 10.9 (and later):: diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index b4d9f4d68a8ab..4fab4882efaeb 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -1328,7 +1328,6 @@ def buildPython(): l_dict = {} exec(data, g_dict, l_dict) build_time_vars = l_dict['build_time_vars'] - exec(data) vars = {} for k, v in build_time_vars.items(): if type(v) == type(''): From webhook-mailer at python.org Thu Jun 25 04:37:21 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 25 Jun 2020 08:37:21 -0000 Subject: [Python-checkins] bpo-41074: Fix support of non-ASCII names and SQL in msilib. (GH-21126) Message-ID: https://github.com/python/cpython/commit/55939b1708d6fc0d36d2be11ccdc6bf207e1bd41 commit: 55939b1708d6fc0d36d2be11ccdc6bf207e1bd41 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-25T11:37:12+03:00 summary: bpo-41074: Fix support of non-ASCII names and SQL in msilib. (GH-21126) * Fix support of non-ASCII names in functions OpenDatabase() and init_database(). * Fix support of non-ASCII SQL in method Database.OpenView(). files: A Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst M Lib/test/test_msilib.py M PC/_msi.c diff --git a/Lib/test/test_msilib.py b/Lib/test/test_msilib.py index f9bd0da7498e9..153a8ac05e560 100644 --- a/Lib/test/test_msilib.py +++ b/Lib/test/test_msilib.py @@ -1,13 +1,13 @@ """ Test suite for the code in msilib """ import os import unittest -from test.support import TESTFN, import_module, unlink +from test.support import TESTFN, FS_NONASCII, import_module, unlink msilib = import_module('msilib') import msilib.schema def init_database(): - path = TESTFN + '.msi' + path = TESTFN + (FS_NONASCII or '') + '.msi' db = msilib.init_database( path, msilib.schema, @@ -42,6 +42,16 @@ def test_view_fetch_returns_none(self): ) self.addCleanup(unlink, db_path) + def test_view_non_ascii(self): + db, db_path = init_database() + view = db.OpenView("SELECT '?-??????' 
FROM Property") + view.Execute(None) + record = view.Fetch() + self.assertEqual(record.GetString(1), '?-??????') + view.Close() + db.Close() + self.addCleanup(unlink, db_path) + def test_summaryinfo_getproperty_issue1104(self): db, db_path = init_database() try: diff --git a/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst b/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst new file mode 100644 index 0000000000000..ec91fd361c3de --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst @@ -0,0 +1,3 @@ +Fixed support of non-ASCII names in functions :func:`msilib.OpenDatabase` +and :func:`msilib.init_database` and non-ASCII SQL in method +:meth:`msilib.Database.OpenView`. diff --git a/PC/_msi.c b/PC/_msi.c index 6ed8724f77f95..58c1cfd997bf8 100644 --- a/PC/_msi.c +++ b/PC/_msi.c @@ -872,14 +872,14 @@ static PyObject* msidb_openview(msiobj *msidb, PyObject *args) { int status; - char *sql; + const wchar_t *sql; MSIHANDLE hView; msiobj *result; - if (!PyArg_ParseTuple(args, "s:OpenView", &sql)) + if (!PyArg_ParseTuple(args, "u:OpenView", &sql)) return NULL; - if ((status = MsiDatabaseOpenView(msidb->h, sql, &hView)) != ERROR_SUCCESS) + if ((status = MsiDatabaseOpenViewW(msidb->h, sql, &hView)) != ERROR_SUCCESS) return msierror(status); result = PyObject_New(struct msiobj, &msiview_Type); @@ -998,18 +998,18 @@ static PyTypeObject msidb_Type = { static PyObject* msiopendb(PyObject *obj, PyObject *args) { int status; - char *path; + const wchar_t *path; int persist; MSIHANDLE h; msiobj *result; - if (!PyArg_ParseTuple(args, "si:MSIOpenDatabase", &path, &persist)) + if (!PyArg_ParseTuple(args, "ui:MSIOpenDatabase", &path, &persist)) return NULL; /* We need to validate that persist is a valid MSIDBOPEN_* value. Otherwise, MsiOpenDatabase may treat the value as a pointer, leading to unexpected behavior. */ if (Py_INVALID_PERSIST(persist)) return msierror(ERROR_INVALID_PARAMETER); - status = MsiOpenDatabase(path, (LPCSTR)(SIZE_T)persist, &h); + status = MsiOpenDatabaseW(path, (LPCWSTR)(SIZE_T)persist, &h); if (status != ERROR_SUCCESS) return msierror(status); From webhook-mailer at python.org Thu Jun 25 04:51:52 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 08:51:52 -0000 Subject: [Python-checkins] Forward port macOS installer updates from 3.7/3.8/3.9 (GH-21132) Message-ID: https://github.com/python/cpython/commit/1931e64de127db766031c442a083905cd74e2485 commit: 1931e64de127db766031c442a083905cd74e2485 branch: master author: Ned Deily committer: GitHub date: 2020-06-25T04:51:46-04:00 summary: Forward port macOS installer updates from 3.7/3.8/3.9 (GH-21132) files: M Mac/BuildScript/README.rst M Mac/BuildScript/build-installer.py M Mac/BuildScript/resources/ReadMe.rtf M Mac/BuildScript/resources/Welcome.rtf M Mac/PythonLauncher/factorySettings.plist M Mac/Resources/app/Info.plist.in diff --git a/Mac/BuildScript/README.rst b/Mac/BuildScript/README.rst index f8b25fa5af19e..94a6bb28cadfa 100644 --- a/Mac/BuildScript/README.rst +++ b/Mac/BuildScript/README.rst @@ -7,216 +7,84 @@ framework-based Python out-of-tree, installs it in a funny place with $DESTROOT, massages that installation to remove .pyc files and such, creates an Installer package from the installation plus other files in ``resources`` and ``scripts`` and placed that on a ``.dmg`` disk image. - -For Python 3.4.0, PSF practice is to build two installer variants -for each release. - -1. 
32-bit-only, i386 and PPC universal, capable on running on all machines - supported by Mac OS X 10.5 through (at least) 10.9:: - - /path/to/bootstrap/python2.7 build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.5.sdk \ - --universal-archs=32-bit \ - --dep-target=10.5 - - - builds the following third-party libraries - - * NCurses 5.9 (http://bugs.python.org/issue15037) - * SQLite 3.8.11 - * XZ 5.0.5 - - - uses system-supplied versions of third-party libraries - - * readline module links with Apple BSD editline (libedit) - - - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building - - - recommended build environment: - - * Mac OS X 10.5.8 Intel or PPC - * Xcode 3.1.4 - * ``MacOSX10.5`` SDK - * ``MACOSX_DEPLOYMENT_TARGET=10.5`` - * Apple ``gcc-4.2`` - * bootstrap non-framework Python 2.7 for documentation build with - Sphinx (as of 3.4.1) - - - alternate build environments: - - * Mac OS X 10.6.8 with Xcode 3.2.6 - - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4`` - * Note Xcode 4.* does not support building for PPC so cannot be used for this build - -2. 64-bit / 32-bit, x86_64 and i386 universal, for OS X 10.6 (and later):: +The installer package built on the dmg is a macOS bundle format installer +package. This format is deprecated and is no longer supported by modern +macOS systems; it is usable on macOS 10.6 and earlier systems. +To be usable on newer versions of macOS, the bits in the bundle package +must be assembled in a macOS flat installer package, using current +versions of the pkgbuild and productbuild utilities. To pass macoS +Gatekeeper download quarantine, the final package must be signed +with a valid Apple Developer ID certificate using productsign. +Starting with macOS 10.15 Catalina, Gatekeeper now also requires +that installer packages are submitted to and pass Apple's automated +notarization service using the altool command. To pass notarization, +the binaries included in the package must be built with at least +the macOS 10.9 SDK, mout now be signed with the codesign utility +and executables must opt in to the hardened run time option with +any necessary entitlements. Details of these processes are +available in the on-line Apple Developer Documentation and man pages. + +As of 3.8.0 and 3.7.7, PSF practice is to build one installer variants +for each release. Note that as of this writing, no Pythons support +building on a newer version of macOS that will run on older versions +by setting MACOSX_DEPLOYMENT_TARGET. This is because the various +Python C modules do not yet support runtime testing of macOS +feature availability (for example, by using macOS AvailabilityMacros.h +and weak-linking). To build a Python that is to be used on a +range of macOS releases, always build on the oldest release to be +supported; the necessary shared libraries for that release will +normally also be available on later systems, with the occasional +exception such as the removal of 32-bit libraries in macOS 10.15. + +build-installer requires Apple Developer tools, either from the +Command Line Tools package or from a full Xcode installation. +You should use the most recent version of either for the operating +system version in use. (One notable exception: on macOS 10.6, +Snow Leopard, use Xcode 3, not Xcode 4 which was released later +in the 10.6 support cycle.) + +1. 
64-bit, x86_64, for OS X 10.9 (and later):: /path/to/bootstrap/python2.7 build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.6.sdk \ - --universal-archs=intel \ - --dep-target=10.6 + --universal-archs=intel-64 \ + --dep-target=10.9 - builds the following third-party libraries - * NCurses 5.9 (http://bugs.python.org/issue15037) - * SQLite 3.8.11 - * XZ 5.0.5 + * OpenSSL 1.1.1 + * Tcl/Tk 8.6 + * NCurses + * SQLite + * XZ + * libffi - uses system-supplied versions of third-party libraries * readline module links with Apple BSD editline (libedit) - - - requires ActiveState Tcl/Tk 8.5.15.1 (or later) to be installed for building - - - recommended build environment: - - * Mac OS X 10.6.8 (or later) - * Xcode 3.2.6 - * ``MacOSX10.6`` SDK - * ``MACOSX_DEPLOYMENT_TARGET=10.6`` - * Apple ``gcc-4.2`` - * bootstrap non-framework Python 2.7 for documentation build with - Sphinx (as of 3.4.1) - - - alternate build environments: - - * none. Xcode 4.x currently supplies two C compilers. - ``llvm-gcc-4.2.1`` has been found to miscompile Python 3.3.x and - produce a non-functional Python executable. As it appears to be - considered a migration aid by Apple and is not likely to be fixed, - its use should be avoided. The other compiler, ``clang``, has been - undergoing rapid development. While it appears to have become - production-ready in the most recent Xcode 5 releases, the versions - available on the deprecated Xcode 4.x for 10.6 were early releases - and did not receive the level of exposure in production environments - that the Xcode 3 gcc-4.2 compiler has had. - - -* For Python 2.7.x and 3.2.x, the 32-bit-only installer was configured to - support Mac OS X 10.3.9 through (at least) 10.6. Because it is - believed that there are few systems still running OS X 10.3 or 10.4 - and because it has become increasingly difficult to test and - support the differences in these earlier systems, as of Python 3.3.0 the PSF - 32-bit installer no longer supports them. For reference in building such - an installer yourself, the details are:: - - /usr/bin/python build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.4u.sdk \ - --universal-archs=32-bit \ - --dep-target=10.3 - - - builds the following third-party libraries - - * Bzip2 - * NCurses - * GNU Readline (GPL) - * SQLite 3 - * XZ - * Zlib 1.2.3 - * Oracle Sleepycat DB 4.8 (Python 2.x only) - - - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building + * zlib + * bz2 - recommended build environment: - * Mac OS X 10.5.8 PPC or Intel - * Xcode 3.1.4 (or later) - * ``MacOSX10.4u`` SDK (later SDKs do not support PPC G3 processors) - * ``MACOSX_DEPLOYMENT_TARGET=10.3`` - * Apple ``gcc-4.0`` - * system Python 2.5 for documentation build with Sphinx - - - alternate build environments: - - * Mac OS X 10.6.8 with Xcode 3.2.6 - - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4`` - + * Mac OS X 10.9.5 + * Xcode Command Line Tools 6.2 + * ``MacOSX10.9`` SDK + * ``MACOSX_DEPLOYMENT_TARGET=10.9`` + * Apple ``clang`` General Prerequisites --------------------- -* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or other local - libraries or utilities (in ``/usr/local``) as they could +* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or Homebrew or + other local libraries or utilities (in ``/usr/local``) as they could interfere with the build. -* The documentation for the release is built using Sphinx - because it is included in the installer. 
For 2.7.x and 3.x.x up to and - including 3.4.0, the ``Doc/Makefile`` uses ``svn`` to download repos of - ``Sphinx`` and its dependencies. Beginning with 3.4.1, the ``Doc/Makefile`` - assumes there is an externally-provided ``sphinx-build`` and requires at - least Python 2.6 to run. Because of this, it is no longer possible to - build a 3.4.1 or later installer on OS X 10.5 using the Apple-supplied - Python 2.5. - * It is safest to start each variant build with an empty source directory - populated with a fresh copy of the untarred source. + populated with a fresh copy of the untarred source or a source repo. * It is recommended that you remove any existing installed version of the Python being built:: sudo rm -rf /Library/Frameworks/Python.framework/Versions/n.n - -The Recipe ----------- - -Here are the steps you need to follow to build a Python installer: - -* Run ``build-installer.py``. Optionally you can pass a number of arguments - to specify locations of various files. Please see the top of - ``build-installer.py`` for its usage. - - Running this script takes some time, it will not only build Python itself - but also some 3th-party libraries that are needed for extensions. - -* When done the script will tell you where the DMG image is (by default - somewhere in ``/tmp/_py``). - -Building other universal installers -................................... - -It is also possible to build a 4-way universal installer that runs on -OS X 10.5 Leopard or later:: - - /usr/bin/python /build-installer.py \ - --dep-target=10.5 - --universal-archs=all - --sdk-path=/Developer/SDKs/MacOSX10.5.sdk - -This requires that the deployment target is 10.5, and hence -also that you are building on at least OS X 10.5. 4-way includes -``i386``, ``x86_64``, ``ppc``, and ``ppc64`` (G5). ``ppc64`` executable -variants can only be run on G5 machines running 10.5. Note that, -while OS X 10.6 is only supported on Intel-based machines, it is possible -to run ``ppc`` (32-bit) executables unmodified thanks to the Rosetta ppc -emulation in OS X 10.5 and 10.6. The 4-way installer variant must be -built with Xcode 3. It is not regularly built or tested. - -Other ``--universal-archs`` options are ``64-bit`` (``x86_64``, ``ppc64``), -and ``3-way`` (``ppc``, ``i386``, ``x86_64``). None of these options -are regularly exercised; use at your own risk. - - -Testing -------- - -Ideally, the resulting binaries should be installed and the test suite run -on all supported OS X releases and architectures. As a practical matter, -that is generally not possible. At a minimum, variant 1 should be run on -a PPC G4 system with OS X 10.5 and at least one Intel system running OS X -10.9, 10.8, 10.7, 10.6, or 10.5. Variant 2 should be run on 10.9, 10.8, -10.7, and 10.6 systems in both 32-bit and 64-bit modes.:: - - /usr/local/bin/pythonn.n -m test -w -u all,-largefile - /usr/local/bin/pythonn.n-32 -m test -w -u all - -Certain tests will be skipped and some cause the interpreter to fail -which will likely generate ``Python quit unexpectedly`` alert messages -to be generated at several points during a test run. These are normal -during testing and can be ignored. - -It is also recommend to launch IDLE and verify that it is at least -functional. Double-click on the IDLE app icon in ``/Applications/Python n.n``. 
-It should also be tested from the command line:: - - /usr/local/bin/idlen.n - diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index a2cba3210211d..4fab4882efaeb 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -2,6 +2,20 @@ """ This script is used to build "official" universal installers on macOS. +NEW for 3.9.0 and backports: +- 2.7 end-of-life issues: + - Python 3 installs now update the Current version link + in /Library/Frameworks/Python.framework/Versions +- fully support running under Python 3 as well as 2.7 +- support building on newer macOS systems with SIP +- fully support building on macOS 10.9+ +- support 10.6+ on best effort +- support bypassing docs build by supplying a prebuilt + docs html tarball in the third-party source library, + in the format and filename conventional of those + downloadable from python.org: + python-3.x.y-docs-html.tar.bz2 + NEW for 3.7.0: - support Intel 64-bit-only () and 32-bit-only installer builds - build and use internal Tcl/Tk 8.6 for 10.6+ builds @@ -14,28 +28,7 @@ - use generic "gcc" as compiler (CC env var) rather than "gcc-4.2" TODO: -- support SDKROOT and DEVELOPER_DIR xcrun env variables -- test with 10.5 and 10.4 and determine support status - -Please ensure that this script keeps working with Python 2.5, to avoid -bootstrap issues (/usr/bin/python is Python 2.5 on OSX 10.5). Doc builds -use current versions of Sphinx and require a reasonably current python3. -Sphinx and dependencies are installed into a venv using the python3's pip -so will fetch them from PyPI if necessary. Since python3 is now used for -Sphinx, build-installer.py should also be converted to use python3! - -For 3.7.0, when building for a 10.6 or higher deployment target, -build-installer builds and links with its own copy of Tcl/Tk 8.6. -Otherwise, it requires an installed third-party version of -Tcl/Tk 8.4 (for OS X 10.4 and 10.5 deployment targets), Tcl/TK 8.5 -(for 10.6 or later), or Tcl/TK 8.6 (for 10.9 or later) -installed in /Library/Frameworks. When installed, -the Python built by this script will attempt to dynamically link first to -Tcl and Tk frameworks in /Library/Frameworks if available otherwise fall -back to the ones in /System/Library/Framework. For the build, we recommend -installing the most recent ActiveTcl 8.6. 8.5, or 8.4 version, depending -on the deployment target. The actual version linked to depends on the -path of /Library/Frameworks/{Tcl,Tk}.framework/Versions/Current. +- test building with SDKROOT and DEVELOPER_DIR xcrun env variables Usage: see USAGE variable in the script. 
""" @@ -56,14 +49,15 @@ INCLUDE_TIMESTAMP = 1 VERBOSE = 1 -from plistlib import Plist +RUNNING_ON_PYTHON2 = sys.version_info.major == 2 -try: +if RUNNING_ON_PYTHON2: from plistlib import writePlist -except ImportError: - # We're run using python2.3 - def writePlist(plist, path): - plist.write(path) +else: + from plistlib import dump + def writePlist(path, plist): + with open(plist, 'wb') as fp: + dump(path, fp) def shellQuote(value): """ @@ -1096,7 +1090,7 @@ def buildPythonDocs(): if not os.path.exists(htmlDir): # Create virtual environment for docs builds with blurb and sphinx runCommand('make venv') - runCommand('venv/bin/python3 -m pip install -U Sphinx==2.2.0') + runCommand('venv/bin/python3 -m pip install -U Sphinx==2.3.1') runCommand('make html PYTHON=venv/bin/python') os.rename(htmlDir, docdir) os.chdir(curDir) @@ -1125,8 +1119,7 @@ def buildPython(): # Since the extra libs are not in their installed framework location # during the build, augment the library path so that the interpreter # will find them during its extension import sanity checks. - os.environ['DYLD_LIBRARY_PATH'] = os.path.join(WORKDIR, - 'libraries', 'usr', 'local', 'lib') + print("Running configure...") runCommand("%s -C --enable-framework --enable-universalsdk=/ " "--with-universal-archs=%s " @@ -1134,12 +1127,15 @@ def buildPython(): "%s " "%s " "%s " + "%s " "LDFLAGS='-g -L%s/libraries/usr/local/lib' " "CFLAGS='-g -I%s/libraries/usr/local/include' 2>&1"%( shellQuote(os.path.join(SRCDIR, 'configure')), UNIVERSALARCHS, (' ', '--with-computed-gotos ')[PYTHON_3], (' ', '--without-ensurepip ')[PYTHON_3], + (' ', "--with-openssl='%s/libraries/usr/local'"%( + shellQuote(WORKDIR)[1:-1],))[PYTHON_3], (' ', "--with-tcltk-includes='-I%s/libraries/usr/local/include'"%( shellQuote(WORKDIR)[1:-1],))[internalTk()], (' ', "--with-tcltk-libs='-L%s/libraries/usr/local/lib -ltcl8.6 -ltk8.6'"%( @@ -1147,6 +1143,24 @@ def buildPython(): shellQuote(WORKDIR)[1:-1], shellQuote(WORKDIR)[1:-1])) + # As of macOS 10.11 with SYSTEM INTEGRITY PROTECTION, DYLD_* + # environment variables are no longer automatically inherited + # by child processes from their parents. We used to just set + # DYLD_LIBRARY_PATH, pointing to the third-party libs, + # in build-installer.py's process environment and it was + # passed through the make utility into the environment of + # setup.py. Instead, we now append DYLD_LIBRARY_PATH to + # the existing RUNSHARED configuration value when we call + # make for extension module builds. + + runshared_for_make = "".join([ + " RUNSHARED=", + "'", + grepValue("Makefile", "RUNSHARED"), + ' DYLD_LIBRARY_PATH=', + os.path.join(WORKDIR, 'libraries', 'usr', 'local', 'lib'), + "'" ]) + # Look for environment value BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS # and, if defined, append its value to the make command. 
This allows # us to pass in version control tags, like GITTAG, to a build from a @@ -1161,21 +1175,24 @@ def buildPython(): make_extras = os.getenv("BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS") if make_extras: - make_cmd = "make " + make_extras + make_cmd = "make " + make_extras + runshared_for_make else: - make_cmd = "make" + make_cmd = "make" + runshared_for_make print("Running " + make_cmd) runCommand(make_cmd) - print("Running make install") - runCommand("make install DESTDIR=%s"%( - shellQuote(rootDir))) + make_cmd = "make install DESTDIR=%s %s"%( + shellQuote(rootDir), + runshared_for_make) + print("Running " + make_cmd) + runCommand(make_cmd) - print("Running make frameworkinstallextras") - runCommand("make frameworkinstallextras DESTDIR=%s"%( - shellQuote(rootDir))) + make_cmd = "make frameworkinstallextras DESTDIR=%s %s"%( + shellQuote(rootDir), + runshared_for_make) + print("Running " + make_cmd) + runCommand(make_cmd) - del os.environ['DYLD_LIBRARY_PATH'] print("Copying required shared libraries") if os.path.exists(os.path.join(WORKDIR, 'libraries', 'Library')): build_lib_dir = os.path.join( @@ -1304,7 +1321,13 @@ def buildPython(): data = fp.read() fp.close() # create build_time_vars dict - exec(data) + if RUNNING_ON_PYTHON2: + exec(data) + else: + g_dict = {} + l_dict = {} + exec(data, g_dict, l_dict) + build_time_vars = l_dict['build_time_vars'] vars = {} for k, v in build_time_vars.items(): if type(v) == type(''): @@ -1421,7 +1444,7 @@ def packageFromRecipe(targetDir, recipe): vers = getFullVersion() major, minor = getVersionMajorMinor() - pl = Plist( + pl = dict( CFBundleGetInfoString="Python.%s %s"%(pkgname, vers,), CFBundleIdentifier='org.python.Python.%s'%(pkgname,), CFBundleName='Python.%s'%(pkgname,), @@ -1443,7 +1466,7 @@ def packageFromRecipe(targetDir, recipe): ) writePlist(pl, os.path.join(packageContents, 'Info.plist')) - pl = Plist( + pl = dict( IFPkgDescriptionDescription=readme, IFPkgDescriptionTitle=recipe.get('long_name', "Python.%s"%(pkgname,)), IFPkgDescriptionVersion=vers, @@ -1459,7 +1482,7 @@ def makeMpkgPlist(path): vers = getFullVersion() major, minor = getVersionMajorMinor() - pl = Plist( + pl = dict( CFBundleGetInfoString="Python %s"%(vers,), CFBundleIdentifier='org.python.Python', CFBundleName='Python', @@ -1512,7 +1535,7 @@ def buildInstaller(): os.mkdir(rsrcDir) makeMpkgPlist(os.path.join(pkgroot, 'Info.plist')) - pl = Plist( + pl = dict( IFPkgDescriptionTitle="Python", IFPkgDescriptionVersion=getVersion(), ) diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf index 086ab42b38936..a4dd8b5ee4102 100644 --- a/Mac/BuildScript/resources/ReadMe.rtf +++ b/Mac/BuildScript/resources/ReadMe.rtf @@ -1,4 +1,4 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2511 +{\rtf1\ansi\ansicpg1252\cocoartf2513 \cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique; \f3\fmodern\fcharset0 CourierNewPSMT;} {\colortbl;\red255\green255\blue255;} @@ -11,7 +11,7 @@ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 \f1\b \cf0 NOTE: -\f0\b0 This is an alpha test preview of Python 3.9.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha test preview of Python 3.10.0, the next feature release of Python 3. 
It is not intended for production use.\ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 \cf0 \ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 @@ -56,12 +56,6 @@ Due to new security checks on macOS 10.15 Catalina, when launching IDLE macOS ma \f0\b0 button to proceed.\ \ -\f1\b \ul macOS 10.15 (Catalina) Gatekeeper Requirements [changed in 3.9.0a4]\ - -\f0\b0 \ulnone \ -As of 2020-02-03, Apple has changed how third-party installer packages, like those provided by python.org, are notarized for verification by Gatekeeper and begun enforcing additional requirements such as code signing and use of the hardened runtime. As of 3.9.0a4, python.org installer packages now meet those additional notarization requirements. The necessary changes in packaging should be transparent to your use of Python but, in the unlikely event that you encounter changes in behavior between 3.9.0a4 and earlier 3.9.0 alphas in areas like ctypes, importlib, or mmap, please check bugs.python.org for existing reports and, if necessary, open a new issue.\ -\ - \f1\b \ul Other changes\ \f0\b0 \ulnone \ diff --git a/Mac/BuildScript/resources/Welcome.rtf b/Mac/BuildScript/resources/Welcome.rtf index bf6aebe92483f..cadfbf6a54c36 100644 --- a/Mac/BuildScript/resources/Welcome.rtf +++ b/Mac/BuildScript/resources/Welcome.rtf @@ -1,5 +1,5 @@ -{\rtf1\ansi\ansicpg1252\cocoartf1671\cocoasubrtf600 -\cocoascreenfonts1{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; +{\rtf1\ansi\ansicpg1252\cocoartf2513 +\cocoascreenfonts1\cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fmodern\fcharset0 CourierNewPSMT; } {\colortbl;\red255\green255\blue255;} {\*\expandedcolortbl;;} @@ -26,5 +26,5 @@ At the end of this install, click on \ \f1\b NOTE: -\f0\b0 This is an alpha test preview of Python 3.9.0, the next feature release of Python 3. It is not intended for production use.\ +\f0\b0 This is an alpha test preview of Python 3.10.0, the next feature release of Python 3. It is not intended for production use.\ } \ No newline at end of file diff --git a/Mac/PythonLauncher/factorySettings.plist b/Mac/PythonLauncher/factorySettings.plist index 120242135ce40..6f650ae761614 100644 --- a/Mac/PythonLauncher/factorySettings.plist +++ b/Mac/PythonLauncher/factorySettings.plist @@ -10,9 +10,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/bin/pythonw - /sw/bin/pythonw + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang @@ -35,12 +35,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/local/bin/python - /usr/bin/pythonw - /usr/bin/python - /sw/bin/pythonw - /sw/bin/python + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang @@ -63,12 +60,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/local/bin/python - /usr/bin/pythonw - /usr/bin/python - /sw/bin/pythonw - /sw/bin/python + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang diff --git a/Mac/Resources/app/Info.plist.in b/Mac/Resources/app/Info.plist.in index 66b5e764c54b0..1d624984a8520 100644 --- a/Mac/Resources/app/Info.plist.in +++ b/Mac/Resources/app/Info.plist.in @@ -20,7 +20,7 @@ CFBundleExecutable Python CFBundleGetInfoString - %version%, (c) 2001-2016 Python Software Foundation. + %version%, (c) 2001-2020 Python Software Foundation. 
CFBundleHelpBookFolder Documentation @@ -55,7 +55,7 @@ NSAppleScriptEnabled NSHumanReadableCopyright - (c) 2001-2016 Python Software Foundation. + (c) 2001-2020 Python Software Foundation. NSHighResolutionCapable From webhook-mailer at python.org Thu Jun 25 05:12:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 25 Jun 2020 09:12:46 -0000 Subject: [Python-checkins] bpo-41074: Fix support of non-ASCII names and SQL in msilib. (GH-21126) Message-ID: https://github.com/python/cpython/commit/3f4de44cf7f3da754efbbc0e70feabf3b9384dce commit: 3f4de44cf7f3da754efbbc0e70feabf3b9384dce branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T02:12:41-07:00 summary: bpo-41074: Fix support of non-ASCII names and SQL in msilib. (GH-21126) * Fix support of non-ASCII names in functions OpenDatabase() and init_database(). * Fix support of non-ASCII SQL in method Database.OpenView(). (cherry picked from commit 55939b1708d6fc0d36d2be11ccdc6bf207e1bd41) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst M Lib/test/test_msilib.py M PC/_msi.c diff --git a/Lib/test/test_msilib.py b/Lib/test/test_msilib.py index f9bd0da7498e9..153a8ac05e560 100644 --- a/Lib/test/test_msilib.py +++ b/Lib/test/test_msilib.py @@ -1,13 +1,13 @@ """ Test suite for the code in msilib """ import os import unittest -from test.support import TESTFN, import_module, unlink +from test.support import TESTFN, FS_NONASCII, import_module, unlink msilib = import_module('msilib') import msilib.schema def init_database(): - path = TESTFN + '.msi' + path = TESTFN + (FS_NONASCII or '') + '.msi' db = msilib.init_database( path, msilib.schema, @@ -42,6 +42,16 @@ def test_view_fetch_returns_none(self): ) self.addCleanup(unlink, db_path) + def test_view_non_ascii(self): + db, db_path = init_database() + view = db.OpenView("SELECT '?-??????' FROM Property") + view.Execute(None) + record = view.Fetch() + self.assertEqual(record.GetString(1), '?-??????') + view.Close() + db.Close() + self.addCleanup(unlink, db_path) + def test_summaryinfo_getproperty_issue1104(self): db, db_path = init_database() try: diff --git a/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst b/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst new file mode 100644 index 0000000000000..ec91fd361c3de --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst @@ -0,0 +1,3 @@ +Fixed support of non-ASCII names in functions :func:`msilib.OpenDatabase` +and :func:`msilib.init_database` and non-ASCII SQL in method +:meth:`msilib.Database.OpenView`. 
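As a rough usage sketch (not part of this commit): with the wide-character MSI APIs in place, both the database file name and the SQL text may contain non-ASCII characters. The file name and property strings below are purely illustrative and require Windows::

    import msilib
    import msilib.schema

    # A non-ASCII database name now works because the C code calls
    # MsiOpenDatabaseW instead of the ANSI MsiOpenDatabase.
    path = "пример.msi"
    db = msilib.init_database(path, msilib.schema,
                              "Example", "product_code", "1.0", "PSF")

    # Non-ASCII SQL now works because OpenView uses MsiDatabaseOpenViewW.
    view = db.OpenView("SELECT 'é-пример' FROM Property")
    view.Execute(None)
    print(view.Fetch().GetString(1))
    view.Close()
    db.Close()
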
diff --git a/PC/_msi.c b/PC/_msi.c index accbe7a720694..5079a524646f0 100644 --- a/PC/_msi.c +++ b/PC/_msi.c @@ -872,14 +872,14 @@ static PyObject* msidb_openview(msiobj *msidb, PyObject *args) { int status; - char *sql; + const wchar_t *sql; MSIHANDLE hView; msiobj *result; - if (!PyArg_ParseTuple(args, "s:OpenView", &sql)) + if (!PyArg_ParseTuple(args, "u:OpenView", &sql)) return NULL; - if ((status = MsiDatabaseOpenView(msidb->h, sql, &hView)) != ERROR_SUCCESS) + if ((status = MsiDatabaseOpenViewW(msidb->h, sql, &hView)) != ERROR_SUCCESS) return msierror(status); result = PyObject_NEW(struct msiobj, &msiview_Type); @@ -998,18 +998,18 @@ static PyTypeObject msidb_Type = { static PyObject* msiopendb(PyObject *obj, PyObject *args) { int status; - char *path; + const wchar_t *path; int persist; MSIHANDLE h; msiobj *result; - if (!PyArg_ParseTuple(args, "si:MSIOpenDatabase", &path, &persist)) + if (!PyArg_ParseTuple(args, "ui:MSIOpenDatabase", &path, &persist)) return NULL; /* We need to validate that persist is a valid MSIDBOPEN_* value. Otherwise, MsiOpenDatabase may treat the value as a pointer, leading to unexpected behavior. */ if (Py_INVALID_PERSIST(persist)) return msierror(ERROR_INVALID_PARAMETER); - status = MsiOpenDatabase(path, (LPCSTR)(SIZE_T)persist, &h); + status = MsiOpenDatabaseW(path, (LPCWSTR)(SIZE_T)persist, &h); if (status != ERROR_SUCCESS) return msierror(status); From webhook-mailer at python.org Thu Jun 25 06:15:43 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 10:15:43 -0000 Subject: [Python-checkins] Forward port macOS installer updates from 3.7 (GH-21132) (GH-21137) Message-ID: https://github.com/python/cpython/commit/8127daba89468b1c3063a2f2765362bbce7cb746 commit: 8127daba89468b1c3063a2f2765362bbce7cb746 branch: 3.8 author: Ned Deily committer: GitHub date: 2020-06-25T06:15:37-04:00 summary: Forward port macOS installer updates from 3.7 (GH-21132) (GH-21137) - fix installer builds when using latest versions of Python 3 - fix installer builds on newer macOS releases with SIP - Python Launcher app factory defaults now use python3 - 3.x installer now updates the Current symlink in framework files: M Mac/BuildScript/README.rst M Mac/BuildScript/build-installer.py M Mac/BuildScript/resources/ReadMe.rtf M Mac/PythonLauncher/factorySettings.plist M Mac/Resources/app/Info.plist.in diff --git a/Mac/BuildScript/README.rst b/Mac/BuildScript/README.rst index f8b25fa5af19e..94a6bb28cadfa 100644 --- a/Mac/BuildScript/README.rst +++ b/Mac/BuildScript/README.rst @@ -7,216 +7,84 @@ framework-based Python out-of-tree, installs it in a funny place with $DESTROOT, massages that installation to remove .pyc files and such, creates an Installer package from the installation plus other files in ``resources`` and ``scripts`` and placed that on a ``.dmg`` disk image. - -For Python 3.4.0, PSF practice is to build two installer variants -for each release. - -1. 
32-bit-only, i386 and PPC universal, capable on running on all machines - supported by Mac OS X 10.5 through (at least) 10.9:: - - /path/to/bootstrap/python2.7 build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.5.sdk \ - --universal-archs=32-bit \ - --dep-target=10.5 - - - builds the following third-party libraries - - * NCurses 5.9 (http://bugs.python.org/issue15037) - * SQLite 3.8.11 - * XZ 5.0.5 - - - uses system-supplied versions of third-party libraries - - * readline module links with Apple BSD editline (libedit) - - - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building - - - recommended build environment: - - * Mac OS X 10.5.8 Intel or PPC - * Xcode 3.1.4 - * ``MacOSX10.5`` SDK - * ``MACOSX_DEPLOYMENT_TARGET=10.5`` - * Apple ``gcc-4.2`` - * bootstrap non-framework Python 2.7 for documentation build with - Sphinx (as of 3.4.1) - - - alternate build environments: - - * Mac OS X 10.6.8 with Xcode 3.2.6 - - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4`` - * Note Xcode 4.* does not support building for PPC so cannot be used for this build - -2. 64-bit / 32-bit, x86_64 and i386 universal, for OS X 10.6 (and later):: +The installer package built on the dmg is a macOS bundle format installer +package. This format is deprecated and is no longer supported by modern +macOS systems; it is usable on macOS 10.6 and earlier systems. +To be usable on newer versions of macOS, the bits in the bundle package +must be assembled in a macOS flat installer package, using current +versions of the pkgbuild and productbuild utilities. To pass macOS +Gatekeeper download quarantine, the final package must be signed +with a valid Apple Developer ID certificate using productsign. +Starting with macOS 10.15 Catalina, Gatekeeper now also requires +that installer packages are submitted to and pass Apple's automated +notarization service using the altool command. To pass notarization, +the binaries included in the package must be built with at least +the macOS 10.9 SDK, must now be signed with the codesign utility +and executables must opt in to the hardened runtime option with +any necessary entitlements. Details of these processes are +available in the on-line Apple Developer Documentation and man pages. + +As of 3.8.0 and 3.7.7, PSF practice is to build one installer variant +for each release. Note that as of this writing, no Pythons support +building on a newer version of macOS that will run on older versions +by setting MACOSX_DEPLOYMENT_TARGET. This is because the various +Python C modules do not yet support runtime testing of macOS +feature availability (for example, by using macOS AvailabilityMacros.h +and weak-linking). To build a Python that is to be used on a +range of macOS releases, always build on the oldest release to be +supported; the necessary shared libraries for that release will +normally also be available on later systems, with the occasional +exception such as the removal of 32-bit libraries in macOS 10.15. + +build-installer requires Apple Developer tools, either from the +Command Line Tools package or from a full Xcode installation. +You should use the most recent version of either for the operating +system version in use. (One notable exception: on macOS 10.6, +Snow Leopard, use Xcode 3, not Xcode 4 which was released later +in the 10.6 support cycle.) + +1.
64-bit, x86_64, for OS X 10.9 (and later):: /path/to/bootstrap/python2.7 build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.6.sdk \ - --universal-archs=intel \ - --dep-target=10.6 + --universal-archs=intel-64 \ + --dep-target=10.9 - builds the following third-party libraries - * NCurses 5.9 (http://bugs.python.org/issue15037) - * SQLite 3.8.11 - * XZ 5.0.5 + * OpenSSL 1.1.1 + * Tcl/Tk 8.6 + * NCurses + * SQLite + * XZ + * libffi - uses system-supplied versions of third-party libraries * readline module links with Apple BSD editline (libedit) - - - requires ActiveState Tcl/Tk 8.5.15.1 (or later) to be installed for building - - - recommended build environment: - - * Mac OS X 10.6.8 (or later) - * Xcode 3.2.6 - * ``MacOSX10.6`` SDK - * ``MACOSX_DEPLOYMENT_TARGET=10.6`` - * Apple ``gcc-4.2`` - * bootstrap non-framework Python 2.7 for documentation build with - Sphinx (as of 3.4.1) - - - alternate build environments: - - * none. Xcode 4.x currently supplies two C compilers. - ``llvm-gcc-4.2.1`` has been found to miscompile Python 3.3.x and - produce a non-functional Python executable. As it appears to be - considered a migration aid by Apple and is not likely to be fixed, - its use should be avoided. The other compiler, ``clang``, has been - undergoing rapid development. While it appears to have become - production-ready in the most recent Xcode 5 releases, the versions - available on the deprecated Xcode 4.x for 10.6 were early releases - and did not receive the level of exposure in production environments - that the Xcode 3 gcc-4.2 compiler has had. - - -* For Python 2.7.x and 3.2.x, the 32-bit-only installer was configured to - support Mac OS X 10.3.9 through (at least) 10.6. Because it is - believed that there are few systems still running OS X 10.3 or 10.4 - and because it has become increasingly difficult to test and - support the differences in these earlier systems, as of Python 3.3.0 the PSF - 32-bit installer no longer supports them. For reference in building such - an installer yourself, the details are:: - - /usr/bin/python build-installer.py \ - --sdk-path=/Developer/SDKs/MacOSX10.4u.sdk \ - --universal-archs=32-bit \ - --dep-target=10.3 - - - builds the following third-party libraries - - * Bzip2 - * NCurses - * GNU Readline (GPL) - * SQLite 3 - * XZ - * Zlib 1.2.3 - * Oracle Sleepycat DB 4.8 (Python 2.x only) - - - requires ActiveState ``Tcl/Tk 8.4`` (currently 8.4.20) to be installed for building + * zlib + * bz2 - recommended build environment: - * Mac OS X 10.5.8 PPC or Intel - * Xcode 3.1.4 (or later) - * ``MacOSX10.4u`` SDK (later SDKs do not support PPC G3 processors) - * ``MACOSX_DEPLOYMENT_TARGET=10.3`` - * Apple ``gcc-4.0`` - * system Python 2.5 for documentation build with Sphinx - - - alternate build environments: - - * Mac OS X 10.6.8 with Xcode 3.2.6 - - need to change ``/System/Library/Frameworks/{Tcl,Tk}.framework/Version/Current`` to ``8.4`` - + * Mac OS X 10.9.5 + * Xcode Command Line Tools 6.2 + * ``MacOSX10.9`` SDK + * ``MACOSX_DEPLOYMENT_TARGET=10.9`` + * Apple ``clang`` General Prerequisites --------------------- -* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or other local - libraries or utilities (in ``/usr/local``) as they could +* No Fink (in ``/sw``) or MacPorts (in ``/opt/local``) or Homebrew or + other local libraries or utilities (in ``/usr/local``) as they could interfere with the build. -* The documentation for the release is built using Sphinx - because it is included in the installer. 
For 2.7.x and 3.x.x up to and - including 3.4.0, the ``Doc/Makefile`` uses ``svn`` to download repos of - ``Sphinx`` and its dependencies. Beginning with 3.4.1, the ``Doc/Makefile`` - assumes there is an externally-provided ``sphinx-build`` and requires at - least Python 2.6 to run. Because of this, it is no longer possible to - build a 3.4.1 or later installer on OS X 10.5 using the Apple-supplied - Python 2.5. - * It is safest to start each variant build with an empty source directory - populated with a fresh copy of the untarred source. + populated with a fresh copy of the untarred source or a source repo. * It is recommended that you remove any existing installed version of the Python being built:: sudo rm -rf /Library/Frameworks/Python.framework/Versions/n.n - -The Recipe ----------- - -Here are the steps you need to follow to build a Python installer: - -* Run ``build-installer.py``. Optionally you can pass a number of arguments - to specify locations of various files. Please see the top of - ``build-installer.py`` for its usage. - - Running this script takes some time, it will not only build Python itself - but also some 3th-party libraries that are needed for extensions. - -* When done the script will tell you where the DMG image is (by default - somewhere in ``/tmp/_py``). - -Building other universal installers -................................... - -It is also possible to build a 4-way universal installer that runs on -OS X 10.5 Leopard or later:: - - /usr/bin/python /build-installer.py \ - --dep-target=10.5 - --universal-archs=all - --sdk-path=/Developer/SDKs/MacOSX10.5.sdk - -This requires that the deployment target is 10.5, and hence -also that you are building on at least OS X 10.5. 4-way includes -``i386``, ``x86_64``, ``ppc``, and ``ppc64`` (G5). ``ppc64`` executable -variants can only be run on G5 machines running 10.5. Note that, -while OS X 10.6 is only supported on Intel-based machines, it is possible -to run ``ppc`` (32-bit) executables unmodified thanks to the Rosetta ppc -emulation in OS X 10.5 and 10.6. The 4-way installer variant must be -built with Xcode 3. It is not regularly built or tested. - -Other ``--universal-archs`` options are ``64-bit`` (``x86_64``, ``ppc64``), -and ``3-way`` (``ppc``, ``i386``, ``x86_64``). None of these options -are regularly exercised; use at your own risk. - - -Testing -------- - -Ideally, the resulting binaries should be installed and the test suite run -on all supported OS X releases and architectures. As a practical matter, -that is generally not possible. At a minimum, variant 1 should be run on -a PPC G4 system with OS X 10.5 and at least one Intel system running OS X -10.9, 10.8, 10.7, 10.6, or 10.5. Variant 2 should be run on 10.9, 10.8, -10.7, and 10.6 systems in both 32-bit and 64-bit modes.:: - - /usr/local/bin/pythonn.n -m test -w -u all,-largefile - /usr/local/bin/pythonn.n-32 -m test -w -u all - -Certain tests will be skipped and some cause the interpreter to fail -which will likely generate ``Python quit unexpectedly`` alert messages -to be generated at several points during a test run. These are normal -during testing and can be ignored. - -It is also recommend to launch IDLE and verify that it is at least -functional. Double-click on the IDLE app icon in ``/Applications/Python n.n``. 
-It should also be tested from the command line:: - - /usr/local/bin/idlen.n - diff --git a/Mac/BuildScript/build-installer.py b/Mac/BuildScript/build-installer.py index 9d3e2a785a6c6..4fab4882efaeb 100755 --- a/Mac/BuildScript/build-installer.py +++ b/Mac/BuildScript/build-installer.py @@ -2,6 +2,20 @@ """ This script is used to build "official" universal installers on macOS. +NEW for 3.9.0 and backports: +- 2.7 end-of-life issues: + - Python 3 installs now update the Current version link + in /Library/Frameworks/Python.framework/Versions +- fully support running under Python 3 as well as 2.7 +- support building on newer macOS systems with SIP +- fully support building on macOS 10.9+ +- support 10.6+ on best effort +- support bypassing docs build by supplying a prebuilt + docs html tarball in the third-party source library, + in the format and filename conventional of those + downloadable from python.org: + python-3.x.y-docs-html.tar.bz2 + NEW for 3.7.0: - support Intel 64-bit-only () and 32-bit-only installer builds - build and use internal Tcl/Tk 8.6 for 10.6+ builds @@ -14,28 +28,7 @@ - use generic "gcc" as compiler (CC env var) rather than "gcc-4.2" TODO: -- support SDKROOT and DEVELOPER_DIR xcrun env variables -- test with 10.5 and 10.4 and determine support status - -Please ensure that this script keeps working with Python 2.5, to avoid -bootstrap issues (/usr/bin/python is Python 2.5 on OSX 10.5). Doc builds -use current versions of Sphinx and require a reasonably current python3. -Sphinx and dependencies are installed into a venv using the python3's pip -so will fetch them from PyPI if necessary. Since python3 is now used for -Sphinx, build-installer.py should also be converted to use python3! - -For 3.7.0, when building for a 10.6 or higher deployment target, -build-installer builds and links with its own copy of Tcl/Tk 8.6. -Otherwise, it requires an installed third-party version of -Tcl/Tk 8.4 (for OS X 10.4 and 10.5 deployment targets), Tcl/TK 8.5 -(for 10.6 or later), or Tcl/TK 8.6 (for 10.9 or later) -installed in /Library/Frameworks. When installed, -the Python built by this script will attempt to dynamically link first to -Tcl and Tk frameworks in /Library/Frameworks if available otherwise fall -back to the ones in /System/Library/Framework. For the build, we recommend -installing the most recent ActiveTcl 8.6. 8.5, or 8.4 version, depending -on the deployment target. The actual version linked to depends on the -path of /Library/Frameworks/{Tcl,Tk}.framework/Versions/Current. +- test building with SDKROOT and DEVELOPER_DIR xcrun env variables Usage: see USAGE variable in the script. """ @@ -56,14 +49,15 @@ INCLUDE_TIMESTAMP = 1 VERBOSE = 1 -from plistlib import Plist +RUNNING_ON_PYTHON2 = sys.version_info.major == 2 -try: +if RUNNING_ON_PYTHON2: from plistlib import writePlist -except ImportError: - # We're run using python2.3 - def writePlist(plist, path): - plist.write(path) +else: + from plistlib import dump + def writePlist(path, plist): + with open(plist, 'wb') as fp: + dump(path, fp) def shellQuote(value): """ @@ -1066,14 +1060,40 @@ def buildPythonDocs(): curDir = os.getcwd() os.chdir(buildDir) runCommand('make clean') - # Create virtual environment for docs builds with blurb and sphinx - runCommand('make venv') - runCommand('venv/bin/python3 -m pip install -U Sphinx==2.0.1') - runCommand('make html PYTHON=venv/bin/python') + + # Search third-party source directory for a pre-built version of the docs. 
+ # Use the naming convention of the docs.python.org html downloads: + # python-3.9.0b1-docs-html.tar.bz2 + doctarfiles = [ f for f in os.listdir(DEPSRC) + if f.startswith('python-'+getFullVersion()) + if f.endswith('-docs-html.tar.bz2') ] + if doctarfiles: + doctarfile = doctarfiles[0] + if not os.path.exists('build'): + os.mkdir('build') + # if build directory existed, it was emptied by make clean, above + os.chdir('build') + # Extract the first archive found for this version into build + runCommand('tar xjf %s'%shellQuote(os.path.join(DEPSRC, doctarfile))) + # see if tar extracted a directory ending in -docs-html + archivefiles = [ f for f in os.listdir('.') + if f.endswith('-docs-html') + if os.path.isdir(f) ] + if archivefiles: + archivefile = archivefiles[0] + # make it our 'Docs/build/html' directory + print(' -- using pre-built python documentation from %s'%archivefile) + os.rename(archivefile, 'html') + os.chdir(buildDir) + + htmlDir = os.path.join('build', 'html') + if not os.path.exists(htmlDir): + # Create virtual environment for docs builds with blurb and sphinx + runCommand('make venv') + runCommand('venv/bin/python3 -m pip install -U Sphinx==2.3.1') + runCommand('make html PYTHON=venv/bin/python') + os.rename(htmlDir, docdir) os.chdir(curDir) - if not os.path.exists(docdir): - os.mkdir(docdir) - os.rename(os.path.join(buildDir, 'build', 'html'), docdir) def buildPython(): @@ -1099,8 +1119,7 @@ def buildPython(): # Since the extra libs are not in their installed framework location # during the build, augment the library path so that the interpreter # will find them during its extension import sanity checks. - os.environ['DYLD_LIBRARY_PATH'] = os.path.join(WORKDIR, - 'libraries', 'usr', 'local', 'lib') + print("Running configure...") runCommand("%s -C --enable-framework --enable-universalsdk=/ " "--with-universal-archs=%s " @@ -1108,12 +1127,15 @@ def buildPython(): "%s " "%s " "%s " + "%s " "LDFLAGS='-g -L%s/libraries/usr/local/lib' " "CFLAGS='-g -I%s/libraries/usr/local/include' 2>&1"%( shellQuote(os.path.join(SRCDIR, 'configure')), UNIVERSALARCHS, (' ', '--with-computed-gotos ')[PYTHON_3], (' ', '--without-ensurepip ')[PYTHON_3], + (' ', "--with-openssl='%s/libraries/usr/local'"%( + shellQuote(WORKDIR)[1:-1],))[PYTHON_3], (' ', "--with-tcltk-includes='-I%s/libraries/usr/local/include'"%( shellQuote(WORKDIR)[1:-1],))[internalTk()], (' ', "--with-tcltk-libs='-L%s/libraries/usr/local/lib -ltcl8.6 -ltk8.6'"%( @@ -1121,6 +1143,24 @@ def buildPython(): shellQuote(WORKDIR)[1:-1], shellQuote(WORKDIR)[1:-1])) + # As of macOS 10.11 with SYSTEM INTEGRITY PROTECTION, DYLD_* + # environment variables are no longer automatically inherited + # by child processes from their parents. We used to just set + # DYLD_LIBRARY_PATH, pointing to the third-party libs, + # in build-installer.py's process environment and it was + # passed through the make utility into the environment of + # setup.py. Instead, we now append DYLD_LIBRARY_PATH to + # the existing RUNSHARED configuration value when we call + # make for extension module builds. + + runshared_for_make = "".join([ + " RUNSHARED=", + "'", + grepValue("Makefile", "RUNSHARED"), + ' DYLD_LIBRARY_PATH=', + os.path.join(WORKDIR, 'libraries', 'usr', 'local', 'lib'), + "'" ]) + # Look for environment value BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS # and, if defined, append its value to the make command. 
This allows # us to pass in version control tags, like GITTAG, to a build from a @@ -1135,21 +1175,24 @@ def buildPython(): make_extras = os.getenv("BUILDINSTALLER_BUILDPYTHON_MAKE_EXTRAS") if make_extras: - make_cmd = "make " + make_extras + make_cmd = "make " + make_extras + runshared_for_make else: - make_cmd = "make" + make_cmd = "make" + runshared_for_make print("Running " + make_cmd) runCommand(make_cmd) - print("Running make install") - runCommand("make install DESTDIR=%s"%( - shellQuote(rootDir))) + make_cmd = "make install DESTDIR=%s %s"%( + shellQuote(rootDir), + runshared_for_make) + print("Running " + make_cmd) + runCommand(make_cmd) - print("Running make frameworkinstallextras") - runCommand("make frameworkinstallextras DESTDIR=%s"%( - shellQuote(rootDir))) + make_cmd = "make frameworkinstallextras DESTDIR=%s %s"%( + shellQuote(rootDir), + runshared_for_make) + print("Running " + make_cmd) + runCommand(make_cmd) - del os.environ['DYLD_LIBRARY_PATH'] print("Copying required shared libraries") if os.path.exists(os.path.join(WORKDIR, 'libraries', 'Library')): build_lib_dir = os.path.join( @@ -1278,7 +1321,13 @@ def buildPython(): data = fp.read() fp.close() # create build_time_vars dict - exec(data) + if RUNNING_ON_PYTHON2: + exec(data) + else: + g_dict = {} + l_dict = {} + exec(data, g_dict, l_dict) + build_time_vars = l_dict['build_time_vars'] vars = {} for k, v in build_time_vars.items(): if type(v) == type(''): @@ -1309,12 +1358,6 @@ def buildPython(): os.chdir(curdir) - if PYTHON_3: - # Remove the 'Current' link, that way we don't accidentally mess - # with an already installed version of python 2 - os.unlink(os.path.join(rootDir, 'Library', 'Frameworks', - 'Python.framework', 'Versions', 'Current')) - def patchFile(inPath, outPath): data = fileContents(inPath) data = data.replace('$FULL_VERSION', getFullVersion()) @@ -1401,7 +1444,7 @@ def packageFromRecipe(targetDir, recipe): vers = getFullVersion() major, minor = getVersionMajorMinor() - pl = Plist( + pl = dict( CFBundleGetInfoString="Python.%s %s"%(pkgname, vers,), CFBundleIdentifier='org.python.Python.%s'%(pkgname,), CFBundleName='Python.%s'%(pkgname,), @@ -1423,7 +1466,7 @@ def packageFromRecipe(targetDir, recipe): ) writePlist(pl, os.path.join(packageContents, 'Info.plist')) - pl = Plist( + pl = dict( IFPkgDescriptionDescription=readme, IFPkgDescriptionTitle=recipe.get('long_name', "Python.%s"%(pkgname,)), IFPkgDescriptionVersion=vers, @@ -1439,7 +1482,7 @@ def makeMpkgPlist(path): vers = getFullVersion() major, minor = getVersionMajorMinor() - pl = Plist( + pl = dict( CFBundleGetInfoString="Python %s"%(vers,), CFBundleIdentifier='org.python.Python', CFBundleName='Python', @@ -1492,7 +1535,7 @@ def buildInstaller(): os.mkdir(rsrcDir) makeMpkgPlist(os.path.join(pkgroot, 'Info.plist')) - pl = Plist( + pl = dict( IFPkgDescriptionTitle="Python", IFPkgDescriptionVersion=getVersion(), ) diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf index 4cb0111d83a53..e09c5e97709d9 100644 --- a/Mac/BuildScript/resources/ReadMe.rtf +++ b/Mac/BuildScript/resources/ReadMe.rtf @@ -1,4 +1,4 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2511 +{\rtf1\ansi\ansicpg1252\cocoartf2513 \cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique; \f3\fmodern\fcharset0 CourierNewPSMT;} {\colortbl;\red255\green255\blue255;} @@ -53,9 +53,29 @@ Due to new security checks on macOS 10.15 Catalina, when launching IDLE macOS ma macOS 10.15 
(Catalina) Gatekeeper Requirements [changed in 3.8.2]\ \f0\b0 \ulnone \ -As of 2020-02-03, Apple has changed how third-party installer packages, like those provided by python.org, are notarized for verification by Gatekeeper and begun enforcing additional requirements such as code signing and use of the hardened runtime. As of 3.8.2, python.org installer packages now meet those additional notarization requirements. The necessary changes in packaging should be transparent to your use of Python but, in the unlikely event that you encounter changes in behavior between 3.8.1 and 3.8.2 in areas like ctypes, importlib, or mmap, please check bugs.python.org for existing reports and, if necessary, open a new issue.\ +As of 2020-02-03, Apple has changed how third-party installer packages, like those provided by python.org, are notarized for verification by Gatekeeper and begun enforcing additional requirements such as code signing and use of the hardened runtime. As of 3.8.2, python.org installer packages now meet those additional notarization requirements. The necessary changes in packaging should be transparent to your use of Python but, in the unlikely event that you encounter changes in behavior between 3.8.1 and newer 3.8.x releases in areas like ctypes, importlib, or mmap, please check bugs.python.org for existing reports and, if necessary, open a new issue.\ \f1\b \ul \ +Python 2.7 end-of-life [changed in 3.8.3]\ +\ +\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 + +\f0\b0 \cf0 \ulnone Python 2.7 has now reached end-of-life. As of Python 3.8.3, the +\f3 Python Launcher +\f0 app now has +\f3 python3 +\f0 factory defaults. Also, the +\f3 Current +\f0 link in the +\f3 /Library/Frameworks/Python.framework/Versions +\f0 directory is now updated to point to the Python 3 being installed; previously, only Python 2 installs updated +\f3 Current +\f0 . This change might affect developers using the framework to embed Python in their applications. 
If another version is desired for embedding, the +\f3 Current +\f0 symlink can be changed manually without affecting 3.8.x behavior.\ +\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 + +\f1\b \cf0 \ul \ulc0 \ Other changes\ \f0\b0 \ulnone \ diff --git a/Mac/PythonLauncher/factorySettings.plist b/Mac/PythonLauncher/factorySettings.plist index 120242135ce40..6f650ae761614 100644 --- a/Mac/PythonLauncher/factorySettings.plist +++ b/Mac/PythonLauncher/factorySettings.plist @@ -10,9 +10,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/bin/pythonw - /sw/bin/pythonw + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang @@ -35,12 +35,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/local/bin/python - /usr/bin/pythonw - /usr/bin/python - /sw/bin/pythonw - /sw/bin/python + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang @@ -63,12 +60,9 @@ interpreter_list - /usr/local/bin/pythonw - /usr/local/bin/python - /usr/bin/pythonw - /usr/bin/python - /sw/bin/pythonw - /sw/bin/python + /usr/local/bin/python3 + /opt/local/bin/python3 + /sw/bin/python3 honourhashbang diff --git a/Mac/Resources/app/Info.plist.in b/Mac/Resources/app/Info.plist.in index 66b5e764c54b0..1d624984a8520 100644 --- a/Mac/Resources/app/Info.plist.in +++ b/Mac/Resources/app/Info.plist.in @@ -20,7 +20,7 @@ CFBundleExecutable Python CFBundleGetInfoString - %version%, (c) 2001-2016 Python Software Foundation. + %version%, (c) 2001-2020 Python Software Foundation. CFBundleHelpBookFolder Documentation @@ -55,7 +55,7 @@ NSAppleScriptEnabled NSHumanReadableCopyright - (c) 2001-2016 Python Software Foundation. + (c) 2001-2020 Python Software Foundation. NSHighResolutionCapable From webhook-mailer at python.org Thu Jun 25 06:29:53 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 10:29:53 -0000 Subject: [Python-checkins] Update macOS installer reader for 3.7.8 Message-ID: https://github.com/python/cpython/commit/934c1fa87318ea6ac936edb9155a6e1380bb4b57 commit: 934c1fa87318ea6ac936edb9155a6e1380bb4b57 branch: 3.7 author: Ned Deily committer: Ned Deily date: 2020-06-25T06:27:57-04:00 summary: Update macOS installer reader for 3.7.8 files: M Mac/BuildScript/resources/ReadMe.rtf diff --git a/Mac/BuildScript/resources/ReadMe.rtf b/Mac/BuildScript/resources/ReadMe.rtf index 10e9b2d64ca56..ca179305b2c12 100644 --- a/Mac/BuildScript/resources/ReadMe.rtf +++ b/Mac/BuildScript/resources/ReadMe.rtf @@ -1,4 +1,4 @@ -{\rtf1\ansi\ansicpg1252\cocoartf2511 +{\rtf1\ansi\ansicpg1252\cocoartf2513 \cocoatextscaling0\cocoaplatform0{\fonttbl\f0\fswiss\fcharset0 Helvetica;\f1\fswiss\fcharset0 Helvetica-Bold;\f2\fswiss\fcharset0 Helvetica-Oblique; \f3\fmodern\fcharset0 CourierNewPSMT;} {\colortbl;\red255\green255\blue255;} @@ -35,7 +35,7 @@ The bundled \ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -\f1\b \cf0 \ul \ulc0 Which installer variant should I use? +\f1\b \cf0 \ul Which installer variant should I use? \f0\b0 \ulnone \ \ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\partightenfactor0 @@ -46,7 +46,7 @@ The bundled \f0\b0 variant is no longer provided. 
\ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -\f1\b \cf0 \ul \ulc0 \ +\f1\b \cf0 \ul \ Using IDLE or other Tk applications \f0\b0 \ulnone \ \ @@ -63,13 +63,33 @@ Due to new security checks on macOS 10.15 Catalina, when launching IDLE macOS ma \f0\b0 file dialog windows. Click on the \f1\b OK \f0\b0 button to proceed.\ + +\f1\b \ul \ +Python 2.7 end-of-life [changed in 3.7.8]\ +\ \pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -\f1\b \cf0 \ul \ulc0 \ +\f0\b0 \cf0 \ulnone Python 2.7 has now reached end-of-life. As of Python 3.7.8, the +\f3 Python Launcher +\f0 app now has +\f3 python3 +\f0 factory defaults. Also, the +\f3 Current +\f0 link in the +\f3 /Library/Frameworks/Python.framework/Versions +\f0 directory is now updated to point to the Python 3 being installed; previously, only Python 2 installs updated +\f3 Current +\f0 . This change might affect developers using the framework to embed Python in their applications. If another version is desired for embedding, the +\f3 Current +\f0 symlink can be changed manually without affecting 3.7.x behavior.\ +\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 + +\f1\b \cf0 \ul \ Other changes\ \ +\pard\tx720\tx1440\tx2160\tx2880\tx3600\tx4320\tx5040\tx5760\tx6480\tx7200\tx7920\tx8640\pardirnatural\partightenfactor0 -\f0\b0 \ulnone For other changes in this release, see the +\f0\b0 \cf0 \ulnone For other changes in this release, see the \f2\i What's new \f0\i0 section in the {\field{\*\fldinst{HYPERLINK "https://www.python.org/doc/"}}{\fldrslt Documentation Set}} for this release and its \f2\i Release Notes From webhook-mailer at python.org Thu Jun 25 06:37:45 2020 From: webhook-mailer at python.org (Rick Heil) Date: Thu, 25 Jun 2020 10:37:45 -0000 Subject: [Python-checkins] bpo-39580: add check for CLI installation on macOS (GH-20271) Message-ID: https://github.com/python/cpython/commit/5f190d2cc60cd82a604cbffb58b6ca8f40350a7a commit: 5f190d2cc60cd82a604cbffb58b6ca8f40350a7a branch: master author: Rick Heil committer: GitHub date: 2020-06-25T06:37:40-04:00 summary: bpo-39580: add check for CLI installation on macOS (GH-20271) Adds a simple check for whether or not the package is being installed in the GUI or using installer on the command line. This addresses an issue where CLI-based software management tools (such as Munki) unexpectedly open Finder windows into a GUI session during installation runs. files: A Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst M Mac/BuildScript/scripts/postflight.documentation diff --git a/Mac/BuildScript/scripts/postflight.documentation b/Mac/BuildScript/scripts/postflight.documentation index 3cbbc1bf10ca2..ec48599cba76e 100755 --- a/Mac/BuildScript/scripts/postflight.documentation +++ b/Mac/BuildScript/scripts/postflight.documentation @@ -12,7 +12,9 @@ SHARE_DOCDIR_TO_FWK="../../.." 
# make link in /Applications/Python m.n/ for Finder users if [ -d "${APPDIR}" ]; then ln -fhs "${FWK_DOCDIR}/index.html" "${APPDIR}/Python Documentation.html" - open "${APPDIR}" || true # open the applications folder + if [ "${COMMAND_LINE_INSTALL}" != 1 ]; then + open "${APPDIR}" || true # open the applications folder + fi fi # make share/doc link in framework for command line users diff --git a/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst b/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst new file mode 100644 index 0000000000000..95d65359804d0 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst @@ -0,0 +1,2 @@ +Avoid opening Finder window if running installer from the command line. +Patch contributed by Rick Heil. From webhook-mailer at python.org Thu Jun 25 06:38:56 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 25 Jun 2020 10:38:56 -0000 Subject: [Python-checkins] bpo-40275: Use new test.support helper submodules in tests (GH-20849) Message-ID: https://github.com/python/cpython/commit/f7ba40b505989495c3585ed782070bdae56330ad commit: f7ba40b505989495c3585ed782070bdae56330ad branch: master author: Hai Shi committer: GitHub date: 2020-06-25T12:38:51+02:00 summary: bpo-40275: Use new test.support helper submodules in tests (GH-20849) files: M Lib/test/libregrtest/main.py M Lib/test/libregrtest/runtest.py M Lib/test/libregrtest/save_env.py M Lib/test/support/script_helper.py M Lib/test/test__xxsubinterpreters.py M Lib/test/test_array.py M Lib/test/test_cmd_line.py M Lib/test/test_dbm_dumb.py M Lib/test/test_decimal.py M Lib/test/test_global.py M Lib/test/test_imp.py M Lib/test/test_ioctl.py M Lib/test/test_platform.py M Lib/test/test_pwd.py M Lib/test/test_tix.py M Lib/test/test_uu.py M Lib/test/test_winreg.py diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index adf31cc94940d..3f9771b9308be 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -20,6 +20,7 @@ from test.libregrtest.pgo import setup_pgo_tests from test.libregrtest.utils import removepy, count, format_duration, printlist from test import support +from test.support import os_helper # bpo-38203: Maximum delay in seconds to exit Python (call Py_Finalize()). @@ -628,7 +629,7 @@ def main(self, tests=None, **kwargs): # to a temporary and writable directory. If it's not possible to # create or change the CWD, the original CWD will be used. # The original CWD is available from support.SAVEDCWD. - with support.temp_cwd(test_cwd, quiet=True): + with os_helper.temp_cwd(test_cwd, quiet=True): # When using multiprocessing, worker processes will use test_cwd # as their parent temporary directory. So when the main process # exit, it removes also subdirectories of worker processes. 
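The remaining files repeat the same mechanical pattern: helpers formerly reached through test.support directly are now imported from dedicated submodules such as test.support.os_helper and test.support.import_helper. A minimal sketch of the new spelling (the test itself is invented for illustration)::

    import unittest
    from test.support import os_helper   # TESTFN, unlink, temp_cwd, ...

    class RoundTripTest(unittest.TestCase):
        def test_write_and_read(self):
            # os_helper.TESTFN / os_helper.unlink replace the old
            # support.TESTFN / support.unlink spellings.
            self.addCleanup(os_helper.unlink, os_helper.TESTFN)
            with open(os_helper.TESTFN, "w") as f:
                f.write("data")
            with open(os_helper.TESTFN) as f:
                self.assertEqual(f.read(), "data")

    if __name__ == "__main__":
        unittest.main()
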
diff --git a/Lib/test/libregrtest/runtest.py b/Lib/test/libregrtest/runtest.py index 9338b28047954..e46cc31caea5a 100644 --- a/Lib/test/libregrtest/runtest.py +++ b/Lib/test/libregrtest/runtest.py @@ -11,6 +11,8 @@ import unittest from test import support +from test.support import import_helper +from test.support import os_helper from test.libregrtest.refleak import dash_R, clear_caches from test.libregrtest.save_env import saved_test_environment from test.libregrtest.utils import format_duration, print_warning @@ -216,7 +218,7 @@ def _runtest_inner2(ns, test_name): abstest = get_abs_module(ns, test_name) # remove the module from sys.module to reload it if it was already imported - support.unload(abstest) + import_helper.unload(abstest) the_module = importlib.import_module(abstest) @@ -313,7 +315,7 @@ def cleanup_test_droppings(test_name, verbose): # since if a test leaves a file open, it cannot be deleted by name (while # there's nothing we can do about that here either, we can display the # name of the offending test, which is a real help). - for name in (support.TESTFN,): + for name in (os_helper.TESTFN,): if not os.path.exists(name): continue diff --git a/Lib/test/libregrtest/save_env.py b/Lib/test/libregrtest/save_env.py index e7c27a698b06a..50ed35364961c 100644 --- a/Lib/test/libregrtest/save_env.py +++ b/Lib/test/libregrtest/save_env.py @@ -10,6 +10,7 @@ import urllib.request import warnings from test import support +from test.support import os_helper from test.libregrtest.utils import print_warning try: import _multiprocessing, multiprocessing.process @@ -241,7 +242,7 @@ def get_files(self): return sorted(fn + ('/' if os.path.isdir(fn) else '') for fn in os.listdir()) def restore_files(self, saved_value): - fn = support.TESTFN + fn = os_helper.TESTFN if fn not in saved_value and (fn + '/') not in saved_value: if os.path.isfile(fn): support.unlink(fn) diff --git a/Lib/test/support/script_helper.py b/Lib/test/support/script_helper.py index 37e576d4a770e..09bb586dcf79d 100644 --- a/Lib/test/support/script_helper.py +++ b/Lib/test/support/script_helper.py @@ -11,7 +11,7 @@ import zipfile from importlib.util import source_from_cache -from test.support import make_legacy_pyc +from test.support.import_helper import make_legacy_pyc # Cached result of the expensive test performed in the function below. 
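The import-related helpers move the same way, as the test files below show. A short illustrative sketch of the two most commonly used import_helper functions (not taken from any single hunk in this change)::

    from test.support import import_helper

    # import_module() raises unittest.SkipTest instead of ImportError,
    # so a test depending on an optional extension module skips cleanly.
    _testcapi = import_helper.import_module('_testcapi')

    # import_fresh_module() re-imports a module with selected accelerator
    # modules blocked, e.g. to exercise the pure-Python implementation.
    py_heapq = import_helper.import_fresh_module('heapq', blocked=['_heapq'])
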
diff --git a/Lib/test/test__xxsubinterpreters.py b/Lib/test/test__xxsubinterpreters.py index eab8f9f56c82a..cf34fc3d0e7af 100644 --- a/Lib/test/test__xxsubinterpreters.py +++ b/Lib/test/test__xxsubinterpreters.py @@ -10,10 +10,11 @@ import unittest from test import support +from test.support import import_helper from test.support import script_helper -interpreters = support.import_module('_xxsubinterpreters') +interpreters = import_helper.import_module('_xxsubinterpreters') ################################## diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py index f731b70415e7f..6af90dfb871d7 100644 --- a/Lib/test/test_array.py +++ b/Lib/test/test_array.py @@ -4,6 +4,7 @@ import unittest from test import support +from test.support import os_helper from test.support import _2G import weakref import pickle @@ -366,13 +367,13 @@ def test_insert(self): def test_tofromfile(self): a = array.array(self.typecode, 2*self.example) self.assertRaises(TypeError, a.tofile) - support.unlink(support.TESTFN) - f = open(support.TESTFN, 'wb') + os_helper.unlink(os_helper.TESTFN) + f = open(os_helper.TESTFN, 'wb') try: a.tofile(f) f.close() b = array.array(self.typecode) - f = open(support.TESTFN, 'rb') + f = open(os_helper.TESTFN, 'rb') self.assertRaises(TypeError, b.fromfile) b.fromfile(f, len(self.example)) self.assertEqual(b, array.array(self.typecode, self.example)) @@ -383,27 +384,27 @@ def test_tofromfile(self): finally: if not f.closed: f.close() - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) def test_fromfile_ioerror(self): # Issue #5395: Check if fromfile raises a proper OSError # instead of EOFError. a = array.array(self.typecode) - f = open(support.TESTFN, 'wb') + f = open(os_helper.TESTFN, 'wb') try: self.assertRaises(OSError, a.fromfile, f, len(self.example)) finally: f.close() - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) def test_filewrite(self): a = array.array(self.typecode, 2*self.example) - f = open(support.TESTFN, 'wb') + f = open(os_helper.TESTFN, 'wb') try: f.write(a) f.close() b = array.array(self.typecode) - f = open(support.TESTFN, 'rb') + f = open(os_helper.TESTFN, 'rb') b.fromfile(f, len(self.example)) self.assertEqual(b, array.array(self.typecode, self.example)) self.assertNotEqual(a, b) @@ -413,7 +414,7 @@ def test_filewrite(self): finally: if not f.closed: f.close() - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) def test_tofromlist(self): a = array.array(self.typecode, 2*self.example) diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py index 724402533038d..4794d446f08c7 100644 --- a/Lib/test/test_cmd_line.py +++ b/Lib/test/test_cmd_line.py @@ -9,6 +9,7 @@ import textwrap import unittest from test import support +from test.support import os_helper from test.support.script_helper import ( spawn_python, kill_python, assert_python_ok, assert_python_failure, interpreter_requires_environment @@ -141,11 +142,11 @@ def test_run_code(self): # All good if execution is successful assert_python_ok('-c', 'pass') - @unittest.skipUnless(support.FS_NONASCII, 'need support.FS_NONASCII') + @unittest.skipUnless(os_helper.FS_NONASCII, 'need os_helper.FS_NONASCII') def test_non_ascii(self): # Test handling of non-ascii data command = ("assert(ord(%r) == %s)" - % (support.FS_NONASCII, ord(support.FS_NONASCII))) + % (os_helper.FS_NONASCII, ord(os_helper.FS_NONASCII))) assert_python_ok('-c', command) # On Windows, pass bytes to subprocess doesn't test how Python decodes the @@ -463,8 +464,8 @@ def 
test_del___main__(self): # Issue #15001: PyRun_SimpleFileExFlags() did crash because it kept a # borrowed reference to the dict of __main__ module and later modify # the dict whereas the module was destroyed - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) with open(filename, "w") as script: print("import sys", file=script) print("del sys.modules['__main__']", file=script) @@ -499,7 +500,7 @@ def test_isolatedmode(self): # dummyvar to prevent extraneous -E dummyvar="") self.assertEqual(out.strip(), b'1 1 1') - with support.temp_cwd() as tmpdir: + with os_helper.temp_cwd() as tmpdir: fake = os.path.join(tmpdir, "uuid.py") main = os.path.join(tmpdir, "main.py") with open(fake, "w") as f: @@ -561,7 +562,7 @@ def test_set_pycache_prefix(self): elif opt is not None: args[:0] = ['-X', f'pycache_prefix={opt}'] with self.subTest(envval=envval, opt=opt): - with support.temp_cwd(): + with os_helper.temp_cwd(): assert_python_ok(*args, **env) def run_xdev(self, *args, check_exitcode=True, xdev=True): @@ -644,7 +645,8 @@ def test_xdev(self): def check_warnings_filters(self, cmdline_option, envvar, use_pywarning=False): if use_pywarning: - code = ("import sys; from test.support import import_fresh_module; " + code = ("import sys; from test.support.import_helper import " + "import_fresh_module; " "warnings = import_fresh_module('warnings', blocked=['_warnings']); ") else: code = "import sys, warnings; " diff --git a/Lib/test/test_dbm_dumb.py b/Lib/test/test_dbm_dumb.py index 0a60778207d96..071cb845013b5 100644 --- a/Lib/test/test_dbm_dumb.py +++ b/Lib/test/test_dbm_dumb.py @@ -10,9 +10,11 @@ import unittest import dbm.dumb as dumbdbm from test import support +from test.support import os_helper from functools import partial -_fname = support.TESTFN +_fname = os_helper.TESTFN + def _delete_files(): for ext in [".dir", ".dat", ".bak"]: @@ -264,7 +266,7 @@ def test_invalid_flag(self): dumbdbm.open(_fname, flag) def test_readonly_files(self): - with support.temp_dir() as dir: + with os_helper.temp_dir() as dir: fname = os.path.join(dir, 'db') with dumbdbm.open(fname, 'n') as f: self.assertEqual(list(f.keys()), []) @@ -277,12 +279,12 @@ def test_readonly_files(self): self.assertEqual(sorted(f.keys()), sorted(self._dict)) f.close() # don't write - @unittest.skipUnless(support.TESTFN_NONASCII, + @unittest.skipUnless(os_helper.TESTFN_NONASCII, 'requires OS support of non-ASCII encodings') def test_nonascii_filename(self): - filename = support.TESTFN_NONASCII + filename = os_helper.TESTFN_NONASCII for suffix in ['.dir', '.dat', '.bak']: - self.addCleanup(support.unlink, filename + suffix) + self.addCleanup(os_helper.unlink, filename + suffix) with dumbdbm.open(filename, 'c') as db: db[b'key'] = b'value' self.assertTrue(os.path.exists(filename + '.dat')) diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py index ed483a4709527..716e6eb7fb127 100644 --- a/Lib/test/test_decimal.py +++ b/Lib/test/test_decimal.py @@ -34,8 +34,9 @@ import locale from test.support import (run_unittest, run_doctest, is_resource_enabled, requires_IEEE_754, requires_docstrings) -from test.support import (import_fresh_module, TestFailed, +from test.support import (TestFailed, run_with_locale, cpython_only) +from test.support.import_helper import import_fresh_module import random import inspect import threading diff --git a/Lib/test/test_global.py b/Lib/test/test_global.py index 8159602be98ee..c71d055297e0c 100644 --- 
a/Lib/test/test_global.py +++ b/Lib/test/test_global.py @@ -1,6 +1,7 @@ """Verify that warnings are issued for global statements following use.""" -from test.support import run_unittest, check_syntax_error, check_warnings +from test.support import run_unittest, check_syntax_error +from test.support.warnings_helper import check_warnings import unittest import warnings diff --git a/Lib/test/test_imp.py b/Lib/test/test_imp.py index fe394dc50c56b..4781d89a29574 100644 --- a/Lib/test/test_imp.py +++ b/Lib/test/test_imp.py @@ -5,6 +5,8 @@ import py_compile import sys from test import support +from test.support import import_helper +from test.support import os_helper from test.support import script_helper import unittest import warnings @@ -107,8 +109,8 @@ def test_issue3594(self): self.assertEqual(file.encoding, 'cp1252') finally: del sys.path[0] - support.unlink(temp_mod_name + '.py') - support.unlink(temp_mod_name + '.pyc') + os_helper.unlink(temp_mod_name + '.py') + os_helper.unlink(temp_mod_name + '.pyc') def test_issue5604(self): # Test cannot cover imp.load_compiled function. @@ -192,10 +194,10 @@ def test_issue5604(self): finally: del sys.path[0] for ext in ('.py', '.pyc'): - support.unlink(temp_mod_name + ext) - support.unlink(init_file_name + ext) - support.rmtree(test_package_name) - support.rmtree('__pycache__') + os_helper.unlink(temp_mod_name + ext) + os_helper.unlink(init_file_name + ext) + os_helper.rmtree(test_package_name) + os_helper.rmtree('__pycache__') def test_issue9319(self): path = os.path.dirname(__file__) @@ -204,7 +206,7 @@ def test_issue9319(self): def test_load_from_source(self): # Verify that the imp module can correctly load and find .py files - # XXX (ncoghlan): It would be nice to use support.CleanImport + # XXX (ncoghlan): It would be nice to use import_helper.CleanImport # here, but that breaks because the os module registers some # handlers in copy_reg on import. Since CleanImport doesn't # revert that registration, the module is left in a broken @@ -213,7 +215,7 @@ def test_load_from_source(self): # workaround orig_path = os.path orig_getenv = os.getenv - with support.EnvironmentVarGuard(): + with os_helper.EnvironmentVarGuard(): x = imp.find_module("os") self.addCleanup(x[0].close) new_os = imp.load_module("os", *x) @@ -299,11 +301,11 @@ def test_issue24748_load_module_skips_sys_modules_check(self): @unittest.skipIf(sys.dont_write_bytecode, "test meaningful only when writing bytecode") def test_bug7732(self): - with support.temp_cwd(): - source = support.TESTFN + '.py' + with os_helper.temp_cwd(): + source = os_helper.TESTFN + '.py' os.mkdir(source) self.assertRaisesRegex(ImportError, '^No module', - imp.find_module, support.TESTFN, ["."]) + imp.find_module, os_helper.TESTFN, ["."]) def test_multiple_calls_to_get_data(self): # Issue #18755: make sure multiple calls to get_data() can succeed. @@ -364,7 +366,7 @@ def test_pyc_invalidation_mode_from_cmdline(self): def test_find_and_load_checked_pyc(self): # issue 34056 - with support.temp_cwd(): + with os_helper.temp_cwd(): with open('mymod.py', 'wb') as fp: fp.write(b'x = 42\n') py_compile.compile( @@ -383,24 +385,24 @@ class ReloadTests(unittest.TestCase): reload().""" def test_source(self): - # XXX (ncoghlan): It would be nice to use test.support.CleanImport + # XXX (ncoghlan): It would be nice to use test.import_helper.CleanImport # here, but that breaks because the os module registers some # handlers in copy_reg on import. 
Since CleanImport doesn't # revert that registration, the module is left in a broken # state after reversion. Reinitialising the module contents # and just reverting os.environ to its previous state is an OK # workaround - with support.EnvironmentVarGuard(): + with os_helper.EnvironmentVarGuard(): import os imp.reload(os) def test_extension(self): - with support.CleanImport('time'): + with import_helper.CleanImport('time'): import time imp.reload(time) def test_builtin(self): - with support.CleanImport('marshal'): + with import_helper.CleanImport('marshal'): import marshal imp.reload(marshal) @@ -443,10 +445,10 @@ def test_source_from_cache(self): class NullImporterTests(unittest.TestCase): - @unittest.skipIf(support.TESTFN_UNENCODABLE is None, + @unittest.skipIf(os_helper.TESTFN_UNENCODABLE is None, "Need an undecodeable filename") def test_unencodeable(self): - name = support.TESTFN_UNENCODABLE + name = os_helper.TESTFN_UNENCODABLE os.mkdir(name) try: self.assertRaises(ImportError, imp.NullImporter, name) diff --git a/Lib/test/test_ioctl.py b/Lib/test/test_ioctl.py index a2873582cf982..7b7067eb7b61d 100644 --- a/Lib/test/test_ioctl.py +++ b/Lib/test/test_ioctl.py @@ -1,6 +1,7 @@ import array import unittest -from test.support import import_module, get_attribute +from test.support import get_attribute +from test.support.import_helper import import_module import os, struct fcntl = import_module('fcntl') termios = import_module('termios') diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py index a5c35dff79b8b..5ad306e0ed579 100644 --- a/Lib/test/test_platform.py +++ b/Lib/test/test_platform.py @@ -6,6 +6,7 @@ from unittest import mock from test import support +from test.support import os_helper class PlatformTest(unittest.TestCase): @@ -17,7 +18,7 @@ def clear_caches(self): def test_architecture(self): res = platform.architecture() - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_architecture_via_symlink(self): # issue3762 with support.PythonSymlink() as py: cmd = "-c", "import platform; print(platform.architecture())" @@ -281,8 +282,8 @@ def test_libc_ver(self): executable = sys.executable platform.libc_ver(executable) - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) with mock.patch('os.confstr', create=True, return_value='mock 1.0'): # test os.confstr() code path diff --git a/Lib/test/test_pwd.py b/Lib/test/test_pwd.py index 85740cecd8229..f8f12571ca90e 100644 --- a/Lib/test/test_pwd.py +++ b/Lib/test/test_pwd.py @@ -1,8 +1,8 @@ import sys import unittest -from test import support +from test.support import import_helper -pwd = support.import_module('pwd') +pwd = import_helper.import_module('pwd') @unittest.skipUnless(hasattr(pwd, 'getpwall'), 'Does not have getpwall()') class PwdTest(unittest.TestCase): diff --git a/Lib/test/test_tix.py b/Lib/test/test_tix.py index e6ea3d07444ce..e6d759e7bd3b6 100644 --- a/Lib/test/test_tix.py +++ b/Lib/test/test_tix.py @@ -1,9 +1,10 @@ import unittest from test import support +from test.support import import_helper import sys # Skip this test if the _tkinter module wasn't built. -_tkinter = support.import_module('_tkinter') +_tkinter = import_helper.import_module('_tkinter') # Skip test if tk cannot be initialized. 
support.requires('gui') diff --git a/Lib/test/test_uu.py b/Lib/test/test_uu.py index c8709f7a0d666..e229e92085c25 100644 --- a/Lib/test/test_uu.py +++ b/Lib/test/test_uu.py @@ -4,7 +4,7 @@ """ import unittest -from test import support +from test.support import os_helper import os import stat @@ -174,10 +174,10 @@ def test_decode(self): class UUFileTest(unittest.TestCase): def setUp(self): - self.tmpin = support.TESTFN + "i" - self.tmpout = support.TESTFN + "o" - self.addCleanup(support.unlink, self.tmpin) - self.addCleanup(support.unlink, self.tmpout) + self.tmpin = os_helper.TESTFN + "i" + self.tmpout = os_helper.TESTFN + "o" + self.addCleanup(os_helper.unlink, self.tmpin) + self.addCleanup(os_helper.unlink, self.tmpout) def test_encode(self): with open(self.tmpin, 'wb') as fin: diff --git a/Lib/test/test_winreg.py b/Lib/test/test_winreg.py index 5c25ec8f7ec67..487abcd8d9531 100644 --- a/Lib/test/test_winreg.py +++ b/Lib/test/test_winreg.py @@ -4,11 +4,12 @@ import os, sys, errno import unittest from test import support +from test.support import import_helper import threading from platform import machine, win32_edition # Do this first so test will be skipped if module doesn't exist -support.import_module('winreg', required_on=['win']) +import_helper.import_module('winreg', required_on=['win']) # Now import everything from winreg import * From webhook-mailer at python.org Thu Jun 25 06:56:19 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 25 Jun 2020 10:56:19 -0000 Subject: [Python-checkins] bpo-39580: add check for CLI installation on macOS (GH-20271) Message-ID: https://github.com/python/cpython/commit/4564ebb1e62378d350d5483b0ae5d0d84082af2b commit: 4564ebb1e62378d350d5483b0ae5d0d84082af2b branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T03:56:14-07:00 summary: bpo-39580: add check for CLI installation on macOS (GH-20271) Adds a simple check for whether or not the package is being installed in the GUI or using installer on the command line. This addresses an issue where CLI-based software management tools (such as Munki) unexpectedly open Finder windows into a GUI session during installation runs. (cherry picked from commit 5f190d2cc60cd82a604cbffb58b6ca8f40350a7a) Co-authored-by: Rick Heil files: A Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst M Mac/BuildScript/scripts/postflight.documentation diff --git a/Mac/BuildScript/scripts/postflight.documentation b/Mac/BuildScript/scripts/postflight.documentation index 3cbbc1bf10ca2..ec48599cba76e 100755 --- a/Mac/BuildScript/scripts/postflight.documentation +++ b/Mac/BuildScript/scripts/postflight.documentation @@ -12,7 +12,9 @@ SHARE_DOCDIR_TO_FWK="../../.." # make link in /Applications/Python m.n/ for Finder users if [ -d "${APPDIR}" ]; then ln -fhs "${FWK_DOCDIR}/index.html" "${APPDIR}/Python Documentation.html" - open "${APPDIR}" || true # open the applications folder + if [ "${COMMAND_LINE_INSTALL}" != 1 ]; then + open "${APPDIR}" || true # open the applications folder + fi fi # make share/doc link in framework for command line users diff --git a/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst b/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst new file mode 100644 index 0000000000000..95d65359804d0 --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst @@ -0,0 +1,2 @@ +Avoid opening Finder window if running installer from the command line. 
+Patch contributed by Rick Heil. From webhook-mailer at python.org Thu Jun 25 07:21:30 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 25 Jun 2020 11:21:30 -0000 Subject: [Python-checkins] bpo-35773: Fix test_bdb on non-UTF-8 locales. (GH-21136) Message-ID: https://github.com/python/cpython/commit/94eee69e9b3a7e7d33142a47ffea560beb8f1596 commit: 94eee69e9b3a7e7d33142a47ffea560beb8f1596 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-25T14:21:25+03:00 summary: bpo-35773: Fix test_bdb on non-UTF-8 locales. (GH-21136) files: M Lib/test/test_bdb.py diff --git a/Lib/test/test_bdb.py b/Lib/test/test_bdb.py index 6e82cce1f411b..ae16880567882 100644 --- a/Lib/test/test_bdb.py +++ b/Lib/test/test_bdb.py @@ -726,7 +726,7 @@ def main(): ('line', 2, 'tfunc_import'), ('step', ), ('line', 3, 'tfunc_import'), ('quit', ), ] - skip = ('importlib*', 'zipimport', TEST_MODULE) + skip = ('importlib*', 'zipimport', 'encodings.*', TEST_MODULE) with TracerRun(self, skip=skip) as tracer: tracer.runcall(tfunc_import) From webhook-mailer at python.org Thu Jun 25 07:22:15 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 11:22:15 -0000 Subject: [Python-checkins] bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. (GH-21141) Message-ID: https://github.com/python/cpython/commit/589e8fe07934a8585d6c5c31d12ae5f766b8fec7 commit: 589e8fe07934a8585d6c5c31d12ae5f766b8fec7 branch: master author: Ned Deily committer: GitHub date: 2020-06-25T07:22:10-04:00 summary: bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. (GH-21141) files: A Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst M Doc/Makefile diff --git a/Doc/Makefile b/Doc/Makefile index 05361f2ee2c82..b8ca1edfbc60a 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -143,7 +143,7 @@ clean: venv: $(PYTHON) -m venv $(VENVDIR) $(VENVDIR)/bin/python3 -m pip install -U pip setuptools - $(VENVDIR)/bin/python3 -m pip install -U Sphinx==2.2.0 blurb python-docs-theme + $(VENVDIR)/bin/python3 -m pip install -U Sphinx==2.3.1 blurb python-docs-theme @echo "The venv has been created in the $(VENVDIR) directory" dist: diff --git a/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst b/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst new file mode 100644 index 0000000000000..25a6d751e5f45 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst @@ -0,0 +1 @@ +Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. From webhook-mailer at python.org Thu Jun 25 07:31:43 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 25 Jun 2020 11:31:43 -0000 Subject: [Python-checkins] bpo-41112: Fix test_peg_generator on non-UTF-8 locales. (GH-21138) Message-ID: https://github.com/python/cpython/commit/aad8f0eeca93b2150760b5e59ed0495e47d1be1e commit: aad8f0eeca93b2150760b5e59ed0495e47d1be1e branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-25T14:31:30+03:00 summary: bpo-41112: Fix test_peg_generator on non-UTF-8 locales. (GH-21138) files: M Lib/test/test_peg_generator/test_c_parser.py diff --git a/Lib/test/test_peg_generator/test_c_parser.py b/Lib/test/test_peg_generator/test_c_parser.py index a5d88501f77ad..f9935258c861e 100644 --- a/Lib/test/test_peg_generator/test_c_parser.py +++ b/Lib/test/test_peg_generator/test_c_parser.py @@ -365,8 +365,8 @@ def test_syntax_error_for_string(self) -> None: start: expr+ NEWLINE? ENDMARKER expr: NAME """ - test_source = """ - for text in ("a b 42 b a", "? ? 42 ? 
?"): + test_source = r""" + for text in ("a b 42 b a", "\u540d \u540d 42 \u540d \u540d"): try: parse.parse_string(text, mode=0) except SyntaxError as e: From webhook-mailer at python.org Thu Jun 25 07:37:43 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 25 Jun 2020 11:37:43 -0000 Subject: [Python-checkins] bpo-35773: Fix test_bdb on non-UTF-8 locales. (GH-21136) Message-ID: https://github.com/python/cpython/commit/84f9c23a12eb091ac5327ebcba0d63794085b7f8 commit: 84f9c23a12eb091ac5327ebcba0d63794085b7f8 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T04:37:38-07:00 summary: bpo-35773: Fix test_bdb on non-UTF-8 locales. (GH-21136) (cherry picked from commit 94eee69e9b3a7e7d33142a47ffea560beb8f1596) Co-authored-by: Serhiy Storchaka files: M Lib/test/test_bdb.py diff --git a/Lib/test/test_bdb.py b/Lib/test/test_bdb.py index 6e82cce1f411b..ae16880567882 100644 --- a/Lib/test/test_bdb.py +++ b/Lib/test/test_bdb.py @@ -726,7 +726,7 @@ def main(): ('line', 2, 'tfunc_import'), ('step', ), ('line', 3, 'tfunc_import'), ('quit', ), ] - skip = ('importlib*', 'zipimport', TEST_MODULE) + skip = ('importlib*', 'zipimport', 'encodings.*', TEST_MODULE) with TracerRun(self, skip=skip) as tracer: tracer.runcall(tfunc_import) From webhook-mailer at python.org Thu Jun 25 07:43:38 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 25 Jun 2020 11:43:38 -0000 Subject: [Python-checkins] bpo-41113: Fix test_warnings on non-Western locales. (GH-21143) Message-ID: https://github.com/python/cpython/commit/0f8ec1fff01173803645ad6a8aea24997bf66fc1 commit: 0f8ec1fff01173803645ad6a8aea24997bf66fc1 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-25T14:43:33+03:00 summary: bpo-41113: Fix test_warnings on non-Western locales. (GH-21143) files: M Lib/test/test_warnings/__init__.py diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 268ecb03f4dc6..65d0b557104bd 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -1198,13 +1198,13 @@ def test_default_filter_configuration(self): @unittest.skipUnless(sys.getfilesystemencoding() != 'ascii', 'requires non-ascii filesystemencoding') def test_nonascii(self): + PYTHONWARNINGS="ignore:DeprecationWarning" + (support.FS_NONASCII or '') rc, stdout, stderr = assert_python_ok("-c", "import sys; sys.stdout.write(str(sys.warnoptions))", PYTHONIOENCODING="utf-8", - PYTHONWARNINGS="ignore:Deprecaci?nWarning", + PYTHONWARNINGS=PYTHONWARNINGS, PYTHONDEVMODE="") - self.assertEqual(stdout, - "['ignore:Deprecaci?nWarning']".encode('utf-8')) + self.assertEqual(stdout, str([PYTHONWARNINGS]).encode()) class CEnvironmentVariableTests(EnvironmentVariableTests, unittest.TestCase): module = c_warnings From webhook-mailer at python.org Thu Jun 25 07:44:19 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 11:44:19 -0000 Subject: [Python-checkins] bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. (GH-21141) (GH-21147) Message-ID: https://github.com/python/cpython/commit/7318f0ab234e1c60a19eeea4e5968fb5d4fd70af commit: 7318f0ab234e1c60a19eeea4e5968fb5d4fd70af branch: 3.8 author: Ned Deily committer: GitHub date: 2020-06-25T07:44:14-04:00 summary: bpo-40204: Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. 
(GH-21141) (GH-21147) files: A Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst M Doc/Makefile diff --git a/Doc/Makefile b/Doc/Makefile index 2169f1a369549..b8ca1edfbc60a 100644 --- a/Doc/Makefile +++ b/Doc/Makefile @@ -143,7 +143,7 @@ clean: venv: $(PYTHON) -m venv $(VENVDIR) $(VENVDIR)/bin/python3 -m pip install -U pip setuptools - $(VENVDIR)/bin/python3 -m pip install -U Sphinx==1.8.2 blurb python-docs-theme + $(VENVDIR)/bin/python3 -m pip install -U Sphinx==2.3.1 blurb python-docs-theme @echo "The venv has been created in the $(VENVDIR) directory" dist: diff --git a/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst b/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst new file mode 100644 index 0000000000000..25a6d751e5f45 --- /dev/null +++ b/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst @@ -0,0 +1 @@ +Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. From webhook-mailer at python.org Thu Jun 25 08:05:01 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 25 Jun 2020 12:05:01 -0000 Subject: [Python-checkins] bpo-41113: Fix test_warnings on non-Western locales. (GH-21143) Message-ID: https://github.com/python/cpython/commit/f547d06ea7db23bb0a2cf81ff85478746efa958e commit: f547d06ea7db23bb0a2cf81ff85478746efa958e branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T05:04:56-07:00 summary: bpo-41113: Fix test_warnings on non-Western locales. (GH-21143) (cherry picked from commit 0f8ec1fff01173803645ad6a8aea24997bf66fc1) Co-authored-by: Serhiy Storchaka files: M Lib/test/test_warnings/__init__.py diff --git a/Lib/test/test_warnings/__init__.py b/Lib/test/test_warnings/__init__.py index 985adc1cda781..7b0d06b320197 100644 --- a/Lib/test/test_warnings/__init__.py +++ b/Lib/test/test_warnings/__init__.py @@ -1199,13 +1199,13 @@ def test_default_filter_configuration(self): @unittest.skipUnless(sys.getfilesystemencoding() != 'ascii', 'requires non-ascii filesystemencoding') def test_nonascii(self): + PYTHONWARNINGS="ignore:DeprecationWarning" + (support.FS_NONASCII or '') rc, stdout, stderr = assert_python_ok("-c", "import sys; sys.stdout.write(str(sys.warnoptions))", PYTHONIOENCODING="utf-8", - PYTHONWARNINGS="ignore:Deprecaci?nWarning", + PYTHONWARNINGS=PYTHONWARNINGS, PYTHONDEVMODE="") - self.assertEqual(stdout, - "['ignore:Deprecaci?nWarning']".encode('utf-8')) + self.assertEqual(stdout, str([PYTHONWARNINGS]).encode()) class CEnvironmentVariableTests(EnvironmentVariableTests, unittest.TestCase): module = c_warnings From webhook-mailer at python.org Thu Jun 25 08:07:47 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Thu, 25 Jun 2020 12:07:47 -0000 Subject: [Python-checkins] bpo-40521: Optimize PyBytes_FromStringAndSize(str, 0) (GH-21142) Message-ID: https://github.com/python/cpython/commit/91698d8caa4b5bb6e8dbb64b156e8afe9e32cac1 commit: 91698d8caa4b5bb6e8dbb64b156e8afe9e32cac1 branch: master author: Victor Stinner committer: GitHub date: 2020-06-25T14:07:40+02:00 summary: bpo-40521: Optimize PyBytes_FromStringAndSize(str, 0) (GH-21142) Always create the empty bytes string singleton. Optimize PyBytes_FromStringAndSize(str, 0): it no longer has to check if the empty string singleton was created or not, it is always available. 
Add functions: * _PyBytes_Init() * bytes_get_empty(), bytes_new_empty() * bytes_create_empty_string_singleton() * unicode_create_empty_string_singleton() _Py_unicode_state: rename empty structure member to empty_string. files: M Include/internal/pycore_interp.h M Include/internal/pycore_pylifecycle.h M Objects/bytesobject.c M Objects/unicodeobject.c M Python/pylifecycle.c diff --git a/Include/internal/pycore_interp.h b/Include/internal/pycore_interp.h index bf1769e5ce2c2..cfc27470c8041 100644 --- a/Include/internal/pycore_interp.h +++ b/Include/internal/pycore_interp.h @@ -66,13 +66,13 @@ struct _Py_unicode_fs_codec { }; struct _Py_bytes_state { + PyObject *empty_string; PyBytesObject *characters[256]; - PyBytesObject *empty_string; }; struct _Py_unicode_state { // The empty Unicode object is a singleton to improve performance. - PyObject *empty; + PyObject *empty_string; /* Single character Unicode strings in the Latin-1 range are being shared as well. */ PyObject *latin1[256]; diff --git a/Include/internal/pycore_pylifecycle.h b/Include/internal/pycore_pylifecycle.h index 3b2173787118f..bffc95b27e946 100644 --- a/Include/internal/pycore_pylifecycle.h +++ b/Include/internal/pycore_pylifecycle.h @@ -32,6 +32,7 @@ PyAPI_FUNC(int) _Py_IsLocaleCoercionTarget(const char *ctype_loc); /* Various one-time initializers */ extern PyStatus _PyUnicode_Init(PyThreadState *tstate); +extern PyStatus _PyBytes_Init(PyThreadState *tstate); extern int _PyStructSequence_Init(void); extern int _PyLong_Init(PyThreadState *tstate); extern PyStatus _PyTuple_Init(PyThreadState *tstate); diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index ce006e15dce9e..782bc8e1fa0b7 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -4,8 +4,9 @@ #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_bytes_methods.h" -#include "pycore_object.h" +#include "pycore_bytes_methods.h" // _Py_bytes_startswith() +#include "pycore_initconfig.h" // _PyStatus_OK() +#include "pycore_object.h" // _PyObject_GC_TRACK #include "pycore_pymem.h" // PYMEM_CLEANBYTE #include "pystrhex.h" @@ -41,6 +42,44 @@ get_bytes_state(void) } +// Return a borrowed reference to the empty bytes string singleton. +static inline PyObject* bytes_get_empty(void) +{ + struct _Py_bytes_state *state = get_bytes_state(); + // bytes_get_empty() must not be called before _PyBytes_Init() + // or after _PyBytes_Fini() + assert(state->empty_string != NULL); + return state->empty_string; +} + + +// Return a strong reference to the empty bytes string singleton. +static inline PyObject* bytes_new_empty(void) +{ + PyObject *empty = bytes_get_empty(); + Py_INCREF(empty); + return (PyObject *)empty; +} + + +static int +bytes_create_empty_string_singleton(struct _Py_bytes_state *state) +{ + // Create the empty bytes string singleton + PyBytesObject *op = (PyBytesObject *)PyObject_Malloc(PyBytesObject_SIZE); + if (op == NULL) { + return -1; + } + _PyObject_InitVar((PyVarObject*)op, &PyBytes_Type, 0); + op->ob_shash = -1; + op->ob_sval[0] = '\0'; + + assert(state->empty_string == NULL); + state->empty_string = (PyObject *)op; + return 0; +} + + /* For PyBytes_FromString(), the parameter `str' points to a null-terminated string containing exactly `size' bytes. 
@@ -70,12 +109,7 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) assert(size >= 0); if (size == 0) { - struct _Py_bytes_state *state = get_bytes_state(); - op = state->empty_string; - if (op != NULL) { - Py_INCREF(op); - return (PyObject *)op; - } + return bytes_new_empty(); } if ((size_t)size > (size_t)PY_SSIZE_T_MAX - PyBytesObject_SIZE) { @@ -94,13 +128,8 @@ _PyBytes_FromSize(Py_ssize_t size, int use_calloc) } _PyObject_InitVar((PyVarObject*)op, &PyBytes_Type, size); op->ob_shash = -1; - if (!use_calloc) + if (!use_calloc) { op->ob_sval[size] = '\0'; - /* empty byte string singleton */ - if (size == 0) { - struct _Py_bytes_state *state = get_bytes_state(); - Py_INCREF(op); - state->empty_string = op; } return (PyObject *) op; } @@ -122,6 +151,9 @@ PyBytes_FromStringAndSize(const char *str, Py_ssize_t size) return (PyObject *)op; } } + if (size == 0) { + return bytes_new_empty(); + } op = (PyBytesObject *)_PyBytes_FromSize(size, 0); if (op == NULL) @@ -155,11 +187,7 @@ PyBytes_FromString(const char *str) struct _Py_bytes_state *state = get_bytes_state(); if (size == 0) { - op = state->empty_string; - if (op != NULL) { - Py_INCREF(op); - return (PyObject *)op; - } + return bytes_new_empty(); } else if (size == 1) { op = state->characters[*str & UCHAR_MAX]; @@ -178,11 +206,8 @@ PyBytes_FromString(const char *str) op->ob_shash = -1; memcpy(op->ob_sval, str, size+1); /* share short strings */ - if (size == 0) { - Py_INCREF(op); - state->empty_string = op; - } - else if (size == 1) { + if (size == 1) { + assert(state->characters[*str & UCHAR_MAX] == NULL); Py_INCREF(op); state->characters[*str & UCHAR_MAX] = op; } @@ -1272,7 +1297,7 @@ PyBytes_AsStringAndSize(PyObject *obj, /* -------------------------------------------------------------------- */ /* Methods */ -#define STRINGLIB_GET_EMPTY() get_bytes_state()->empty_string +#define STRINGLIB_GET_EMPTY() bytes_get_empty() #include "stringlib/stringdefs.h" @@ -3053,9 +3078,9 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) goto error; } if (newsize == 0) { - *pv = _PyBytes_FromSize(0, 0); + *pv = bytes_new_empty(); Py_DECREF(v); - return (*pv == NULL) ? -1 : 0; + return 0; } /* XXX UNREF/NEWREF interface should be more symmetrical */ #ifdef Py_REF_DEBUG @@ -3084,6 +3109,18 @@ _PyBytes_Resize(PyObject **pv, Py_ssize_t newsize) return -1; } + +PyStatus +_PyBytes_Init(PyThreadState *tstate) +{ + struct _Py_bytes_state *state = &tstate->interp->bytes; + if (bytes_create_empty_string_singleton(state) < 0) { + return _PyStatus_NO_MEMORY(); + } + return _PyStatus_OK(); +} + + void _PyBytes_Fini(PyThreadState *tstate) { diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 5ba99514d2969..55c886727ba2e 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -41,16 +41,15 @@ OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
#define PY_SSIZE_T_CLEAN #include "Python.h" #include "pycore_abstract.h" // _PyIndex_Check() -#include "pycore_bytes_methods.h" -#include "pycore_fileutils.h" -#include "pycore_initconfig.h" +#include "pycore_bytes_methods.h" // _Py_bytes_lower() +#include "pycore_initconfig.h" // _PyStatus_OK() #include "pycore_interp.h" // PyInterpreterState.fs_codec -#include "pycore_object.h" -#include "pycore_pathconfig.h" -#include "pycore_pylifecycle.h" +#include "pycore_object.h" // _PyObject_GC_TRACK() +#include "pycore_pathconfig.h" // _Py_DumpPathConfig() +#include "pycore_pylifecycle.h" // _Py_SetFileSystemEncoding() #include "pycore_pystate.h" // _PyInterpreterState_GET() -#include "ucnhash.h" -#include "stringlib/eq.h" +#include "ucnhash.h" // _PyUnicode_Name_CAPI +#include "stringlib/eq.h" // unicode_eq() #ifdef MS_WINDOWS #include @@ -236,10 +235,12 @@ static inline PyObject* unicode_get_empty(void) struct _Py_unicode_state *state = get_unicode_state(); // unicode_get_empty() must not be called before _PyUnicode_Init() // or after _PyUnicode_Fini() - assert(state->empty != NULL); - return state->empty; + assert(state->empty_string != NULL); + return state->empty_string; } + +// Return a strong reference to the empty string singleton. static inline PyObject* unicode_new_empty(void) { PyObject *empty = unicode_get_empty(); @@ -1385,6 +1386,26 @@ _PyUnicode_Dump(PyObject *op) } #endif +static int +unicode_create_empty_string_singleton(struct _Py_unicode_state *state) +{ + // Use size=1 rather than size=0, so PyUnicode_New(0, maxchar) can be + // optimized to always use state->empty_string without having to check if + // it is NULL or not. + PyObject *empty = PyUnicode_New(1, 0); + if (empty == NULL) { + return -1; + } + PyUnicode_1BYTE_DATA(empty)[0] = 0; + _PyUnicode_LENGTH(empty) = 0; + assert(_PyUnicode_CheckConsistency(empty, 1)); + + assert(state->empty_string == NULL); + state->empty_string = empty; + return 0; +} + + PyObject * PyUnicode_New(Py_ssize_t size, Py_UCS4 maxchar) { @@ -1972,7 +1993,7 @@ static int unicode_is_singleton(PyObject *unicode) { struct _Py_unicode_state *state = get_unicode_state(); - if (unicode == state->empty) { + if (unicode == state->empty_string) { return 1; } PyASCIIObject *ascii = (PyASCIIObject *)unicode; @@ -15542,20 +15563,10 @@ _PyUnicode_Init(PyThreadState *tstate) 0x2029, /* PARAGRAPH SEPARATOR */ }; - // Use size=1 rather than size=0, so PyUnicode_New(0, maxchar) can be - // optimized to always use state->empty without having to check if it is - // NULL or not. 
- PyObject *empty = PyUnicode_New(1, 0); - if (empty == NULL) { + struct _Py_unicode_state *state = &tstate->interp->unicode; + if (unicode_create_empty_string_singleton(state) < 0) { return _PyStatus_NO_MEMORY(); } - PyUnicode_1BYTE_DATA(empty)[0] = 0; - _PyUnicode_LENGTH(empty) = 0; - assert(_PyUnicode_CheckConsistency(empty, 1)); - - struct _Py_unicode_state *state = &tstate->interp->unicode; - assert(state->empty == NULL); - state->empty = empty; if (_Py_IsMainInterpreter(tstate)) { /* initialize the linebreak bloom filter */ @@ -16223,7 +16234,7 @@ _PyUnicode_Fini(PyThreadState *tstate) #endif /* __INSURE__ */ } - Py_CLEAR(state->empty); + Py_CLEAR(state->empty_string); for (Py_ssize_t i = 0; i < 256; i++) { Py_CLEAR(state->latin1[i]); diff --git a/Python/pylifecycle.c b/Python/pylifecycle.c index 4b658f847bc12..cd993ea13418f 100644 --- a/Python/pylifecycle.c +++ b/Python/pylifecycle.c @@ -607,6 +607,11 @@ pycore_init_types(PyThreadState *tstate) return status; } + status = _PyBytes_Init(tstate); + if (_PyStatus_EXCEPTION(status)) { + return status; + } + status = _PyExc_Init(tstate); if (_PyStatus_EXCEPTION(status)) { return status; From webhook-mailer at python.org Thu Jun 25 08:11:18 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Thu, 25 Jun 2020 12:11:18 -0000 Subject: [Python-checkins] [3.7] bpo-41009: fix requires_OS_version() class decorator (GH-20942) (GH-20949) Message-ID: https://github.com/python/cpython/commit/d3798ed9f1762dcf632369505d517f476eccde13 commit: d3798ed9f1762dcf632369505d517f476eccde13 branch: 3.7 author: Christian Heimes committer: GitHub date: 2020-06-25T08:11:13-04:00 summary: [3.7] bpo-41009: fix requires_OS_version() class decorator (GH-20942) (GH-20949) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran. (cherry picked from commit bb6ec14479f18c32e71e43f2785f177aa17aabbd) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index b198c2ca76391..b78451b9e6c56 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -560,25 +560,25 @@ def _requires_unix_version(sysname, min_version): For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if the FreeBSD version is less than 7.2. 
""" - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kw): - if platform.system() == sysname: - version_txt = platform.release().split('-', 1)[0] - try: - version = tuple(map(int, version_txt.split('.'))) - except ValueError: - pass - else: - if version < min_version: - min_version_txt = '.'.join(map(str, min_version)) - raise unittest.SkipTest( - "%s version %s or higher required, not %s" - % (sysname, min_version_txt, version_txt)) - return func(*args, **kw) - wrapper.min_version = min_version - return wrapper - return decorator + import platform + min_version_txt = '.'.join(map(str, min_version)) + version_txt = platform.release().split('-', 1)[0] + if platform.system() == sysname: + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + skip = False + else: + skip = version < min_version + else: + skip = False + + return unittest.skipIf( + skip, + f"{sysname} version {min_version_txt} or higher required, not " + f"{version_txt}" + ) + def requires_freebsd_version(*min_version): """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst new file mode 100644 index 0000000000000..1208c119a3556 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst @@ -0,0 +1,2 @@ +Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as +class decorator. From webhook-mailer at python.org Thu Jun 25 08:15:45 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 25 Jun 2020 12:15:45 -0000 Subject: [Python-checkins] bpo-40275: Use new test.support helper submodules in tests (GH-20824) Message-ID: https://github.com/python/cpython/commit/06a40d735939fd7d5cb77a68a6e18299b6484fa5 commit: 06a40d735939fd7d5cb77a68a6e18299b6484fa5 branch: master author: Hai Shi committer: GitHub date: 2020-06-25T14:15:40+02:00 summary: bpo-40275: Use new test.support helper submodules in tests (GH-20824) files: M Lib/test/support/socket_helper.py M Lib/test/test_asyncio/__init__.py M Lib/test/test_asyncio/test_base_events.py M Lib/test/test_asyncio/test_sendfile.py M Lib/test/test_asyncio/test_subprocess.py M Lib/test/test_asyncio/test_unix_events.py M Lib/test/test_getopt.py M Lib/test/test_tcl.py M Lib/test/test_xmlrpc.py diff --git a/Lib/test/support/socket_helper.py b/Lib/test/support/socket_helper.py index f709ffd40dd8a..7070c12c253f6 100644 --- a/Lib/test/support/socket_helper.py +++ b/Lib/test/support/socket_helper.py @@ -146,7 +146,7 @@ def skip_unless_bind_unix_socket(test): return unittest.skip('No UNIX Sockets')(test) global _bind_nix_socket_error if _bind_nix_socket_error is None: - from test.support import TESTFN, unlink + from .os_helper import TESTFN, unlink path = TESTFN + "can_bind_unix_socket" with socket.socket(socket.AF_UNIX) as sock: try: diff --git a/Lib/test/test_asyncio/__init__.py b/Lib/test/test_asyncio/__init__.py index c77c7a81278be..5d415044d7dc6 100644 --- a/Lib/test/test_asyncio/__init__.py +++ b/Lib/test/test_asyncio/__init__.py @@ -1,8 +1,10 @@ import os -from test.support import load_package_tests, import_module +from test.support import load_package_tests +from test.support import import_helper + # Skip tests if we don't have concurrent.futures. 
-import_module('concurrent.futures') +import_helper.import_module('concurrent.futures') def load_tests(*args): return load_package_tests(os.path.dirname(__file__), *args) diff --git a/Lib/test/test_asyncio/test_base_events.py b/Lib/test/test_asyncio/test_base_events.py index 533d5cc7f5038..f74dabc2db945 100644 --- a/Lib/test/test_asyncio/test_base_events.py +++ b/Lib/test/test_asyncio/test_base_events.py @@ -16,6 +16,7 @@ from test.test_asyncio import utils as test_utils from test import support from test.support.script_helper import assert_python_ok +from test.support import os_helper from test.support import socket_helper @@ -1983,14 +1984,14 @@ async def wait_closed(self): def setUpClass(cls): cls.__old_bufsize = constants.SENDFILE_FALLBACK_READBUFFER_SIZE constants.SENDFILE_FALLBACK_READBUFFER_SIZE = 1024 * 16 - with open(support.TESTFN, 'wb') as fp: + with open(os_helper.TESTFN, 'wb') as fp: fp.write(cls.DATA) super().setUpClass() @classmethod def tearDownClass(cls): constants.SENDFILE_FALLBACK_READBUFFER_SIZE = cls.__old_bufsize - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) super().tearDownClass() def setUp(self): @@ -1998,7 +1999,7 @@ def setUp(self): # BaseSelectorEventLoop() has no native implementation self.loop = BaseSelectorEventLoop() self.set_event_loop(self.loop) - self.file = open(support.TESTFN, 'rb') + self.file = open(os_helper.TESTFN, 'rb') self.addCleanup(self.file.close) super().setUp() @@ -2095,7 +2096,7 @@ def test_blocking_socket(self): def test_nonbinary_file(self): sock = self.make_socket() - with open(support.TESTFN, 'r') as f: + with open(os_helper.TESTFN, 'r') as f: with self.assertRaisesRegex(ValueError, "binary mode"): self.run_loop(self.loop.sock_sendfile(sock, f)) diff --git a/Lib/test/test_asyncio/test_sendfile.py b/Lib/test/test_asyncio/test_sendfile.py index dbce199a9b8e1..a30d9b9b4d9a0 100644 --- a/Lib/test/test_asyncio/test_sendfile.py +++ b/Lib/test/test_asyncio/test_sendfile.py @@ -10,6 +10,7 @@ from asyncio import constants from unittest import mock from test import support +from test.support import os_helper from test.support import socket_helper from test.test_asyncio import utils as test_utils @@ -98,17 +99,17 @@ def create_event_loop(self): @classmethod def setUpClass(cls): - with open(support.TESTFN, 'wb') as fp: + with open(os_helper.TESTFN, 'wb') as fp: fp.write(cls.DATA) super().setUpClass() @classmethod def tearDownClass(cls): - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) super().tearDownClass() def setUp(self): - self.file = open(support.TESTFN, 'rb') + self.file = open(os_helper.TESTFN, 'rb') self.addCleanup(self.file.close) self.loop = self.create_event_loop() self.set_event_loop(self.loop) diff --git a/Lib/test/test_asyncio/test_subprocess.py b/Lib/test/test_asyncio/test_subprocess.py index 6657a88e657c2..177a02cdcc174 100644 --- a/Lib/test/test_asyncio/test_subprocess.py +++ b/Lib/test/test_asyncio/test_subprocess.py @@ -10,6 +10,7 @@ from asyncio import subprocess from test.test_asyncio import utils as test_utils from test import support +from test.support import os_helper if sys.platform != 'win32': from asyncio import unix_events @@ -626,10 +627,10 @@ async def execute(): def test_create_subprocess_exec_with_path(self): async def execute(): p = await subprocess.create_subprocess_exec( - support.FakePath(sys.executable), '-c', 'pass') + os_helper.FakePath(sys.executable), '-c', 'pass') await p.wait() p = await subprocess.create_subprocess_exec( - sys.executable, '-c', 'pass', 
support.FakePath('.')) + sys.executable, '-c', 'pass', os_helper.FakePath('.')) await p.wait() self.assertIsNone(self.loop.run_until_complete(execute())) @@ -737,7 +738,7 @@ async def execute(): with self.assertRaises(RuntimeError): await subprocess.create_subprocess_exec( - support.FakePath(sys.executable), '-c', 'pass') + os_helper.FakePath(sys.executable), '-c', 'pass') watcher.add_child_handler.assert_not_called() diff --git a/Lib/test/test_asyncio/test_unix_events.py b/Lib/test/test_asyncio/test_unix_events.py index 10bd46dea1991..2c7d52a15bb72 100644 --- a/Lib/test/test_asyncio/test_unix_events.py +++ b/Lib/test/test_asyncio/test_unix_events.py @@ -14,7 +14,7 @@ import threading import unittest from unittest import mock -from test import support +from test.support import os_helper from test.support import socket_helper if sys.platform == 'win32': @@ -467,19 +467,19 @@ async def wait_closed(self): @classmethod def setUpClass(cls): - with open(support.TESTFN, 'wb') as fp: + with open(os_helper.TESTFN, 'wb') as fp: fp.write(cls.DATA) super().setUpClass() @classmethod def tearDownClass(cls): - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) super().tearDownClass() def setUp(self): self.loop = asyncio.new_event_loop() self.set_event_loop(self.loop) - self.file = open(support.TESTFN, 'rb') + self.file = open(os_helper.TESTFN, 'rb') self.addCleanup(self.file.close) super().setUp() diff --git a/Lib/test/test_getopt.py b/Lib/test/test_getopt.py index 9275dc4c5bab1..9261276ebb972 100644 --- a/Lib/test/test_getopt.py +++ b/Lib/test/test_getopt.py @@ -1,7 +1,8 @@ # test_getopt.py # David Goodger 2000-08-19 -from test.support import verbose, run_doctest, EnvironmentVarGuard +from test.support import verbose, run_doctest +from test.support.os_helper import EnvironmentVarGuard import unittest import getopt diff --git a/Lib/test/test_tcl.py b/Lib/test/test_tcl.py index 1c5b9cf2bd2a8..db982dac8d653 100644 --- a/Lib/test/test_tcl.py +++ b/Lib/test/test_tcl.py @@ -5,9 +5,10 @@ import os import warnings from test import support +from test.support import import_helper # Skip this test if the _tkinter module wasn't built. 
-_tkinter = support.import_module('_tkinter') +_tkinter = import_helper.import_module('_tkinter') import tkinter from tkinter import Tcl diff --git a/Lib/test/test_xmlrpc.py b/Lib/test/test_xmlrpc.py index 79f702d0a75d3..3dfa84bf77db0 100644 --- a/Lib/test/test_xmlrpc.py +++ b/Lib/test/test_xmlrpc.py @@ -15,6 +15,7 @@ import io import contextlib from test import support +from test.support import os_helper from test.support import socket_helper from test.support import threading_helper from test.support import ALWAYS_EQ, LARGEST, SMALLEST @@ -1372,7 +1373,7 @@ def tearDown(self): self.cgi = None def test_cgi_get(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['REQUEST_METHOD'] = 'GET' # if the method is GET and no request_text is given, it runs handle_get # get sysout output @@ -1404,7 +1405,7 @@ def test_cgi_xmlrpc_response(self): """ - with support.EnvironmentVarGuard() as env, \ + with os_helper.EnvironmentVarGuard() as env, \ captured_stdout(encoding=self.cgi.encoding) as data_out, \ support.captured_stdin() as data_in: data_in.write(data) From webhook-mailer at python.org Thu Jun 25 08:18:48 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Thu, 25 Jun 2020 12:18:48 -0000 Subject: [Python-checkins] [3.8] bpo-41009: fix requires_OS_version() class decorator (GH-20942) (GH-20948) Message-ID: https://github.com/python/cpython/commit/8075fe199b0569428cebaf213663bdd1ca40d792 commit: 8075fe199b0569428cebaf213663bdd1ca40d792 branch: 3.8 author: Christian Heimes committer: GitHub date: 2020-06-25T05:18:43-07:00 summary: [3.8] bpo-41009: fix requires_OS_version() class decorator (GH-20942) (GH-20948) Signed-off-by: Christian Heimes Automerge-Triggered-By: @tiran. (cherry picked from commit bb6ec14479f18c32e71e43f2785f177aa17aabbd) Co-authored-by: Christian Heimes files: A Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst M Lib/test/support/__init__.py diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 3d287a98ac22d..937766b8ce725 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -586,25 +586,25 @@ def _requires_unix_version(sysname, min_version): For example, @_requires_unix_version('FreeBSD', (7, 2)) raises SkipTest if the FreeBSD version is less than 7.2. 
""" - def decorator(func): - @functools.wraps(func) - def wrapper(*args, **kw): - if platform.system() == sysname: - version_txt = platform.release().split('-', 1)[0] - try: - version = tuple(map(int, version_txt.split('.'))) - except ValueError: - pass - else: - if version < min_version: - min_version_txt = '.'.join(map(str, min_version)) - raise unittest.SkipTest( - "%s version %s or higher required, not %s" - % (sysname, min_version_txt, version_txt)) - return func(*args, **kw) - wrapper.min_version = min_version - return wrapper - return decorator + import platform + min_version_txt = '.'.join(map(str, min_version)) + version_txt = platform.release().split('-', 1)[0] + if platform.system() == sysname: + try: + version = tuple(map(int, version_txt.split('.'))) + except ValueError: + skip = False + else: + skip = version < min_version + else: + skip = False + + return unittest.skipIf( + skip, + f"{sysname} version {min_version_txt} or higher required, not " + f"{version_txt}" + ) + def requires_freebsd_version(*min_version): """Decorator raising SkipTest if the OS is FreeBSD and the FreeBSD version is diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst new file mode 100644 index 0000000000000..1208c119a3556 --- /dev/null +++ b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst @@ -0,0 +1,2 @@ +Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as +class decorator. From webhook-mailer at python.org Thu Jun 25 10:46:54 2020 From: webhook-mailer at python.org (Ned Deily) Date: Thu, 25 Jun 2020 14:46:54 -0000 Subject: [Python-checkins] bpo-40939: run autoreconf to fix configure{, .ac} disparity (GH-21152) Message-ID: https://github.com/python/cpython/commit/8d02f91dc6139a13b6efa9bd5a5b4bdd7ddcc29d commit: 8d02f91dc6139a13b6efa9bd5a5b4bdd7ddcc29d branch: master author: Ned Deily committer: GitHub date: 2020-06-25T10:46:44-04:00 summary: bpo-40939: run autoreconf to fix configure{,.ac} disparity (GH-21152) files: M configure diff --git a/configure b/configure index dc590c2e98273..c51f396824b63 100755 --- a/configure +++ b/configure @@ -16806,7 +16806,7 @@ do done -SRCDIRS="Parser Parser/pegen Objects Python Modules Modules/_io Programs" +SRCDIRS="Parser Objects Python Modules Modules/_io Programs" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for build directories" >&5 $as_echo_n "checking for build directories... " >&6; } for dir in $SRCDIRS; do From webhook-mailer at python.org Thu Jun 25 10:55:58 2020 From: webhook-mailer at python.org (Ronald Oussoren) Date: Thu, 25 Jun 2020 14:55:58 -0000 Subject: [Python-checkins] BPO-41100: Support macOS 11 when building (GH-21113) Message-ID: https://github.com/python/cpython/commit/8ea6353f60625c96ce96588c70ff24a77f8c71f9 commit: 8ea6353f60625c96ce96588c70ff24a77f8c71f9 branch: master author: Ronald Oussoren committer: GitHub date: 2020-06-25T10:55:48-04:00 summary: BPO-41100: Support macOS 11 when building (GH-21113) files: A Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst new file mode 100644 index 0000000000000..ded66b567a92d --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst @@ -0,0 +1 @@ +Support macOS 11 when building. 
diff --git a/configure b/configure index c51f396824b63..5024860ca4395 100755 --- a/configure +++ b/configure @@ -3426,7 +3426,7 @@ $as_echo "#define _BSD_SOURCE 1" >>confdefs.h # has no effect, don't bother defining them Darwin/[6789].*) define_xopen_source=no;; - Darwin/1[0-9].*) + Darwin/[12][0-9].*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined diff --git a/configure.ac b/configure.ac index 70deefb6b9aea..5a3e340aa3e72 100644 --- a/configure.ac +++ b/configure.ac @@ -510,7 +510,7 @@ case $ac_sys_system/$ac_sys_release in # has no effect, don't bother defining them Darwin/@<:@6789@:>@.*) define_xopen_source=no;; - Darwin/1@<:@0-9@:>@.*) + Darwin/@<:@[12]@:>@@<:@0-9@:>@.*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined From webhook-mailer at python.org Thu Jun 25 10:56:36 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Thu, 25 Jun 2020 14:56:36 -0000 Subject: [Python-checkins] bpo-41069: Make TESTFN and the CWD for tests containing non-ascii characters. (GH-21035) Message-ID: https://github.com/python/cpython/commit/700cfa8c90a90016638bac13c4efd03786b2b2a0 commit: 700cfa8c90a90016638bac13c4efd03786b2b2a0 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-25T17:56:31+03:00 summary: bpo-41069: Make TESTFN and the CWD for tests containing non-ascii characters. (GH-21035) files: A Misc/NEWS.d/next/Tests/2020-06-22-00-21-12.bpo-41069.bLZkX-.rst M Lib/test/libregrtest/main.py M Lib/test/support/__init__.py M Lib/test/support/os_helper.py M Lib/test/test_binhex.py M Lib/test/test_cgitb.py M Lib/test/test_compileall.py M Lib/test/test_embed.py M Lib/test/test_fstring.py M Lib/test/test_genericpath.py M Lib/test/test_gzip.py M Lib/test/test_msilib.py M Lib/test/test_ntpath.py M Lib/test/test_os.py M Lib/test/test_pdb.py M Lib/test/test_posixpath.py M Lib/test/test_tarfile.py M Lib/test/test_tools/test_pathfix.py M Lib/test/test_trace.py M Lib/test/test_urllib.py M Lib/test/test_uu.py M Lib/test/test_venv.py M Lib/test/test_warnings/__init__.py M Modules/_testcapimodule.c diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 3f9771b9308be..7675a97b5b48e 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -597,6 +597,7 @@ def create_temp_dir(self): test_cwd = 'test_python_worker_{}'.format(pid) else: test_cwd = 'test_python_{}'.format(pid) + test_cwd += support.FS_NONASCII test_cwd = os.path.join(self.tmp_dir, test_cwd) return test_cwd diff --git a/Lib/test/support/__init__.py b/Lib/test/support/__init__.py index 5707d8eeaa28b..f8f60fb6c27b9 100644 --- a/Lib/test/support/__init__.py +++ b/Lib/test/support/__init__.py @@ -20,7 +20,7 @@ forget, import_fresh_module, import_module, make_legacy_pyc, modules_cleanup, modules_setup, unload) from .os_helper import ( - FS_NONASCII, SAVEDCWD, TESTFN, TESTFN_NONASCII, + FS_NONASCII, SAVEDCWD, TESTFN, TESTFN_ASCII, TESTFN_NONASCII, TESTFN_UNENCODABLE, TESTFN_UNDECODABLE, TESTFN_UNICODE, can_symlink, can_xattr, change_cwd, create_empty_file, fd_count, diff --git a/Lib/test/support/os_helper.py b/Lib/test/support/os_helper.py index d3347027cf204..d9807a1e114b6 100644 --- a/Lib/test/support/os_helper.py +++ b/Lib/test/support/os_helper.py @@ -13,16 +13,16 @@ # Filename used for testing if os.name == 'java': # Jython 
disallows @ in module names - TESTFN = '$test' + TESTFN_ASCII = '$test' else: - TESTFN = '@test' + TESTFN_ASCII = '@test' # Disambiguate TESTFN for parallel testing, while letting it remain a valid # module name. -TESTFN = "{}_{}_tmp".format(TESTFN, os.getpid()) +TESTFN_ASCII = "{}_{}_tmp".format(TESTFN_ASCII, os.getpid()) # TESTFN_UNICODE is a non-ascii filename -TESTFN_UNICODE = TESTFN + "-\xe0\xf2\u0258\u0141\u011f" +TESTFN_UNICODE = TESTFN_ASCII + "-\xe0\xf2\u0258\u0141\u011f" if sys.platform == 'darwin': # In Mac OS X's VFS API file names are, by definition, canonically # decomposed Unicode, encoded using UTF-8. See QA1173: @@ -39,7 +39,7 @@ if sys.getwindowsversion().platform >= 2: # Different kinds of characters from various languages to minimize the # probability that the whole name is encodable to MBCS (issue #9819) - TESTFN_UNENCODABLE = TESTFN + "-\u5171\u0141\u2661\u0363\uDC80" + TESTFN_UNENCODABLE = TESTFN_ASCII + "-\u5171\u0141\u2661\u0363\uDC80" try: TESTFN_UNENCODABLE.encode(sys.getfilesystemencoding()) except UnicodeEncodeError: @@ -56,7 +56,7 @@ b'\xff'.decode(sys.getfilesystemencoding()) except UnicodeDecodeError: # 0xff will be encoded using the surrogate character u+DCFF - TESTFN_UNENCODABLE = TESTFN \ + TESTFN_UNENCODABLE = TESTFN_ASCII \ + b'-\xff'.decode(sys.getfilesystemencoding(), 'surrogateescape') else: # File system encoding (eg. ISO-8859-* encodings) can encode @@ -64,8 +64,8 @@ pass # FS_NONASCII: non-ASCII character encodable by os.fsencode(), -# or None if there is no such character. -FS_NONASCII = None +# or an empty string if there is no such character. +FS_NONASCII = '' for character in ( # First try printable and common characters to have a readable filename. # For each character, the encoding list are just example of encodings able @@ -141,13 +141,14 @@ try: name.decode(sys.getfilesystemencoding()) except UnicodeDecodeError: - TESTFN_UNDECODABLE = os.fsencode(TESTFN) + name + TESTFN_UNDECODABLE = os.fsencode(TESTFN_ASCII) + name break if FS_NONASCII: - TESTFN_NONASCII = TESTFN + '-' + FS_NONASCII + TESTFN_NONASCII = TESTFN_ASCII + FS_NONASCII else: TESTFN_NONASCII = None +TESTFN = TESTFN_NONASCII or TESTFN_ASCII def make_bad_fd(): diff --git a/Lib/test/test_binhex.py b/Lib/test/test_binhex.py index 859553222a3e9..591f32a4f0f7f 100644 --- a/Lib/test/test_binhex.py +++ b/Lib/test/test_binhex.py @@ -13,9 +13,10 @@ class BinHexTestCase(unittest.TestCase): def setUp(self): - self.fname1 = support.TESTFN + "1" - self.fname2 = support.TESTFN + "2" - self.fname3 = support.TESTFN + "very_long_filename__very_long_filename__very_long_filename__very_long_filename__" + # binhex supports only file names encodable to Latin1 + self.fname1 = support.TESTFN_ASCII + "1" + self.fname2 = support.TESTFN_ASCII + "2" + self.fname3 = support.TESTFN_ASCII + "very_long_filename__very_long_filename__very_long_filename__very_long_filename__" def tearDown(self): support.unlink(self.fname1) diff --git a/Lib/test/test_cgitb.py b/Lib/test/test_cgitb.py index 8991bc1ff34ba..bab152d855456 100644 --- a/Lib/test/test_cgitb.py +++ b/Lib/test/test_cgitb.py @@ -41,8 +41,9 @@ def test_syshook_no_logdir_default_format(self): rc, out, err = assert_python_failure( '-c', ('import cgitb; cgitb.enable(logdir=%s); ' - 'raise ValueError("Hello World")') % repr(tracedir)) - out = out.decode(sys.getfilesystemencoding()) + 'raise ValueError("Hello World")') % repr(tracedir), + PYTHONIOENCODING='utf-8') + out = out.decode() self.assertIn("ValueError", out) self.assertIn("Hello World", out) 
self.assertIn("<module>", out) @@ -56,8 +57,9 @@ def test_syshook_no_logdir_text_format(self): rc, out, err = assert_python_failure( '-c', ('import cgitb; cgitb.enable(format="text", logdir=%s); ' - 'raise ValueError("Hello World")') % repr(tracedir)) - out = out.decode(sys.getfilesystemencoding()) + 'raise ValueError("Hello World")') % repr(tracedir), + PYTHONIOENCODING='utf-8') + out = out.decode() self.assertIn("ValueError", out) self.assertIn("Hello World", out) self.assertNotIn('
<p>
', out) diff --git a/Lib/test/test_compileall.py b/Lib/test/test_compileall.py index b4061b79357b8..3bbc6817f8d56 100644 --- a/Lib/test/test_compileall.py +++ b/Lib/test/test_compileall.py @@ -456,13 +456,15 @@ def _get_run_args(self, args): def assertRunOK(self, *args, **env_vars): rc, out, err = script_helper.assert_python_ok( - *self._get_run_args(args), **env_vars) + *self._get_run_args(args), **env_vars, + PYTHONIOENCODING='utf-8') self.assertEqual(b'', err) return out def assertRunNotOK(self, *args, **env_vars): rc, out, err = script_helper.assert_python_failure( - *self._get_run_args(args), **env_vars) + *self._get_run_args(args), **env_vars, + PYTHONIOENCODING='utf-8') return rc, out, err def assertCompiled(self, fn): diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index e740fe8952999..fe47289777a42 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -1349,7 +1349,7 @@ def test_audit_run_file(self): returncode=1) def test_audit_run_interactivehook(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" with open(startup, "w", encoding="utf-8") as f: print("import sys", file=f) print("sys.__interactivehook__ = lambda: None", file=f) @@ -1362,7 +1362,7 @@ def test_audit_run_interactivehook(self): os.unlink(startup) def test_audit_run_startup(self): - startup = os.path.join(self.oldcwd, support.TESTFN) + (support.FS_NONASCII or '') + ".py" + startup = os.path.join(self.oldcwd, support.TESTFN) + ".py" with open(startup, "w", encoding="utf-8") as f: print("pass", file=f) try: diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 9eb7ebe10559a..7ffe01d2d8c31 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -1055,8 +1055,9 @@ def test_filename_in_syntaxerror(self): file_path = os.path.join(cwd, 't.py') with open(file_path, 'w') as f: f.write('f"{a b}"') # This generates a SyntaxError - _, _, stderr = assert_python_failure(file_path) - self.assertIn(file_path, stderr.decode('utf-8')) + _, _, stderr = assert_python_failure(file_path, + PYTHONIOENCODING='ascii') + self.assertIn(file_path.encode('ascii', 'backslashreplace'), stderr) def test_loop(self): for i in range(1000): diff --git a/Lib/test/test_genericpath.py b/Lib/test/test_genericpath.py index 9d5ac44b6d06a..e7acbcd29088b 100644 --- a/Lib/test/test_genericpath.py +++ b/Lib/test/test_genericpath.py @@ -534,7 +534,7 @@ def test_import(self): class PathLikeTests(unittest.TestCase): def setUp(self): - self.file_name = support.TESTFN.lower() + self.file_name = support.TESTFN self.file_path = FakePath(support.TESTFN) self.addCleanup(support.unlink, self.file_name) create_file(self.file_name, b"test_genericpath.PathLikeTests") diff --git a/Lib/test/test_gzip.py b/Lib/test/test_gzip.py index 78334213f24b1..0f235d1805e0d 100644 --- a/Lib/test/test_gzip.py +++ b/Lib/test/test_gzip.py @@ -328,8 +328,15 @@ def test_metadata(self): cmByte = fRead.read(1) self.assertEqual(cmByte, b'\x08') # deflate + try: + expectedname = self.filename.encode('Latin-1') + b'\x00' + expectedflags = b'\x08' # only the FNAME flag is set + except UnicodeEncodeError: + expectedname = b'' + expectedflags = b'\x00' + flagsByte = fRead.read(1) - self.assertEqual(flagsByte, b'\x08') # only the FNAME flag is set + self.assertEqual(flagsByte, expectedflags) mtimeBytes = fRead.read(4) self.assertEqual(mtimeBytes, struct.pack(' 
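For reference, a minimal sketch (not part of the patch) of what the os_helper change above means in practice: TESTFN_ASCII keeps the traditional plain name, while TESTFN now picks up a non-ASCII suffix whenever FS_NONASCII is non-empty. The concrete values below are only examples; the pid suffix and the chosen character depend on the interpreter and the filesystem encoding.

    from test.support import os_helper

    # Traditional ASCII-only name, e.g. '@test_4321_tmp'; tests such as
    # test_binhex above switch to this when they need an encodable name.
    print(os_helper.TESTFN_ASCII)

    # TESTFN is TESTFN_ASCII plus FS_NONASCII when such a character exists
    # for the filesystem encoding, and falls back to TESTFN_ASCII otherwise.
    print(os_helper.TESTFN)

Together with the libregrtest change above, the regression suite therefore exercises non-ASCII file names and a non-ASCII working directory whenever the filesystem encoding allows it.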
https://github.com/python/cpython/commit/c4a53e48a98b3d8fdcfc11d1b5af3ea0dd987a88 commit: c4a53e48a98b3d8fdcfc11d1b5af3ea0dd987a88 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T08:15:06-07:00 summary: BPO-41100: Support macOS 11 when building (GH-21113) (cherry picked from commit 8ea6353f60625c96ce96588c70ff24a77f8c71f9) Co-authored-by: Ronald Oussoren files: A Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst new file mode 100644 index 0000000000000..ded66b567a92d --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst @@ -0,0 +1 @@ +Support macOS 11 when building. diff --git a/configure b/configure index 8886561645762..96dcd0dcd5fec 100755 --- a/configure +++ b/configure @@ -3398,7 +3398,7 @@ $as_echo "#define _BSD_SOURCE 1" >>confdefs.h # has no effect, don't bother defining them Darwin/[6789].*) define_xopen_source=no;; - Darwin/1[0-9].*) + Darwin/[12][0-9].*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined diff --git a/configure.ac b/configure.ac index d8de9d49439c3..18a044629a785 100644 --- a/configure.ac +++ b/configure.ac @@ -498,7 +498,7 @@ case $ac_sys_system/$ac_sys_release in # has no effect, don't bother defining them Darwin/@<:@6789@:>@.*) define_xopen_source=no;; - Darwin/1@<:@0-9@:>@.*) + Darwin/@<:@[12]@:>@@<:@0-9@:>@.*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined From webhook-mailer at python.org Thu Jun 25 11:15:56 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Thu, 25 Jun 2020 15:15:56 -0000 Subject: [Python-checkins] BPO-41100: Support macOS 11 when building (GH-21113) (GH-21155) Message-ID: https://github.com/python/cpython/commit/cfbc759f918d646a59acb99251fc10b3900248a6 commit: cfbc759f918d646a59acb99251fc10b3900248a6 branch: 3.7 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-25T11:15:52-04:00 summary: BPO-41100: Support macOS 11 when building (GH-21113) (GH-21155) (cherry picked from commit 8ea6353f60625c96ce96588c70ff24a77f8c71f9) Co-authored-by: Ronald Oussoren files: A Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst M configure M configure.ac diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst new file mode 100644 index 0000000000000..ded66b567a92d --- /dev/null +++ b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst @@ -0,0 +1 @@ +Support macOS 11 when building. 
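For reference, a quick illustrative check (not part of the patch) of the widened release pattern used in these configure hunks: macOS 11 identifies itself as Darwin 20, which the old glob 'Darwin/1[0-9].*' did not match. fnmatch approximates the shell case glob here, and the release strings below are just example values of $ac_sys_system/$ac_sys_release.

    import fnmatch

    for release in ("Darwin/9.8.0", "Darwin/19.6.0", "Darwin/20.1.0"):
        # 'Darwin/1[0-9].*' covered Darwin 10-19 (through macOS 10.15);
        # '[12][0-9]' extends the range to Darwin 20-29, so macOS 11 matches.
        print(release, fnmatch.fnmatch(release, "Darwin/[12][0-9].*"))

Only the first string falls through to the other case branches; both Darwin 19 and Darwin 20 now take the define_xopen_source=no branch.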
diff --git a/configure b/configure index 57b36e29b97f4..c807c98e568f8 100755 --- a/configure +++ b/configure @@ -3374,7 +3374,7 @@ $as_echo "#define _BSD_SOURCE 1" >>confdefs.h # has no effect, don't bother defining them Darwin/[6789].*) define_xopen_source=no;; - Darwin/1[0-9].*) + Darwin/[12][0-9].*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined diff --git a/configure.ac b/configure.ac index f9dabd86c2cfc..805c0bba08deb 100644 --- a/configure.ac +++ b/configure.ac @@ -490,7 +490,7 @@ case $ac_sys_system/$ac_sys_release in # has no effect, don't bother defining them Darwin/@<:@6789@:>@.*) define_xopen_source=no;; - Darwin/1@<:@0-9@:>@.*) + Darwin/@<:@[12]@:>@@<:@0-9@:>@.*) define_xopen_source=no;; # On AIX 4 and 5.1, mbstate_t is defined only when _XOPEN_SOURCE == 500 but # used in wcsnrtombs() and mbsnrtowcs() even if _XOPEN_SOURCE is not defined From webhook-mailer at python.org Thu Jun 25 13:18:06 2020 From: webhook-mailer at python.org (Hai Shi) Date: Thu, 25 Jun 2020 17:18:06 -0000 Subject: [Python-checkins] bpo-40275: Use new test.support helper submodules in tests (GH-21151) Message-ID: https://github.com/python/cpython/commit/847f94f47b104aec678d1d2a2d8fe23d817f375e commit: 847f94f47b104aec678d1d2a2d8fe23d817f375e branch: master author: Hai Shi committer: GitHub date: 2020-06-25T19:17:57+02:00 summary: bpo-40275: Use new test.support helper submodules in tests (GH-21151) Use new test.support helper submodules in tests: * distutils tests * test_buffer * test_compile * test_filecmp * test_fileinput * test_readline * test_smtpnet * test_structmembers * test_tools files: M Lib/distutils/tests/test_archive_util.py M Lib/distutils/tests/test_bdist_msi.py M Lib/distutils/tests/test_bdist_wininst.py M Lib/distutils/tests/test_core.py M Lib/distutils/tests/test_dist.py M Lib/distutils/tests/test_extension.py M Lib/distutils/tests/test_file_util.py M Lib/distutils/tests/test_filelist.py M Lib/distutils/tests/test_register.py M Lib/distutils/tests/test_sdist.py M Lib/distutils/tests/test_sysconfig.py M Lib/distutils/tests/test_unixccompiler.py M Lib/test/test_buffer.py M Lib/test/test_compile.py M Lib/test/test_filecmp.py M Lib/test/test_fileinput.py M Lib/test/test_readline.py M Lib/test/test_smtpnet.py M Lib/test/test_structmembers.py M Lib/test/test_tools/__init__.py diff --git a/Lib/distutils/tests/test_archive_util.py b/Lib/distutils/tests/test_archive_util.py index e9aad0e40fd14..edcec2513e0d9 100644 --- a/Lib/distutils/tests/test_archive_util.py +++ b/Lib/distutils/tests/test_archive_util.py @@ -13,7 +13,9 @@ ARCHIVE_FORMATS) from distutils.spawn import find_executable, spawn from distutils.tests import support -from test.support import check_warnings, run_unittest, patch, change_cwd +from test.support import run_unittest, patch +from test.support.os_helper import change_cwd +from test.support.warnings_helper import check_warnings try: import grp diff --git a/Lib/distutils/tests/test_bdist_msi.py b/Lib/distutils/tests/test_bdist_msi.py index 418e60ec72977..a61266a14f967 100644 --- a/Lib/distutils/tests/test_bdist_msi.py +++ b/Lib/distutils/tests/test_bdist_msi.py @@ -1,7 +1,8 @@ """Tests for distutils.command.bdist_msi.""" import sys import unittest -from test.support import run_unittest, check_warnings +from test.support import run_unittest +from test.support.warnings_helper import check_warnings from distutils.tests import support diff --git 
a/Lib/distutils/tests/test_bdist_wininst.py b/Lib/distutils/tests/test_bdist_wininst.py index 5c3d025d3321d..c338069a1dcf2 100644 --- a/Lib/distutils/tests/test_bdist_wininst.py +++ b/Lib/distutils/tests/test_bdist_wininst.py @@ -2,7 +2,8 @@ import sys import platform import unittest -from test.support import run_unittest, check_warnings +from test.support import run_unittest +from test.support.warnings_helper import check_warnings from distutils.command.bdist_wininst import bdist_wininst from distutils.tests import support diff --git a/Lib/distutils/tests/test_core.py b/Lib/distutils/tests/test_core.py index 27ce7324afcfb..4e6694a3d1cd0 100644 --- a/Lib/distutils/tests/test_core.py +++ b/Lib/distutils/tests/test_core.py @@ -5,8 +5,8 @@ import os import shutil import sys -import test.support from test.support import captured_stdout, run_unittest +from test.support import os_helper import unittest from distutils.tests import support from distutils import log @@ -62,13 +62,13 @@ def tearDown(self): super(CoreTestCase, self).tearDown() def cleanup_testfn(self): - path = test.support.TESTFN + path = os_helper.TESTFN if os.path.isfile(path): os.remove(path) elif os.path.isdir(path): shutil.rmtree(path) - def write_setup(self, text, path=test.support.TESTFN): + def write_setup(self, text, path=os_helper.TESTFN): f = open(path, "w") try: f.write(text) @@ -105,8 +105,8 @@ def test_run_setup_uses_current_dir(self): cwd = os.getcwd() # Create a directory and write the setup.py file there: - os.mkdir(test.support.TESTFN) - setup_py = os.path.join(test.support.TESTFN, "setup.py") + os.mkdir(os_helper.TESTFN) + setup_py = os.path.join(os_helper.TESTFN, "setup.py") distutils.core.run_setup( self.write_setup(setup_prints_cwd, path=setup_py)) diff --git a/Lib/distutils/tests/test_dist.py b/Lib/distutils/tests/test_dist.py index 60956dadef234..f8a9e86b16f0b 100644 --- a/Lib/distutils/tests/test_dist.py +++ b/Lib/distutils/tests/test_dist.py @@ -12,8 +12,9 @@ from distutils.cmd import Command from test.support import ( - TESTFN, captured_stdout, captured_stderr, run_unittest + captured_stdout, captured_stderr, run_unittest ) +from test.support.os_helper import TESTFN from distutils.tests import support from distutils import log diff --git a/Lib/distutils/tests/test_extension.py b/Lib/distutils/tests/test_extension.py index e35f2738b6a21..81fad02dbec82 100644 --- a/Lib/distutils/tests/test_extension.py +++ b/Lib/distutils/tests/test_extension.py @@ -3,7 +3,8 @@ import os import warnings -from test.support import check_warnings, run_unittest +from test.support import run_unittest +from test.support.warnings_helper import check_warnings from distutils.extension import read_setup_file, Extension class ExtensionTestCase(unittest.TestCase): diff --git a/Lib/distutils/tests/test_file_util.py b/Lib/distutils/tests/test_file_util.py index a4e2d025f9661..c7783b858d583 100644 --- a/Lib/distutils/tests/test_file_util.py +++ b/Lib/distutils/tests/test_file_util.py @@ -8,7 +8,9 @@ from distutils import log from distutils.tests import support from distutils.errors import DistutilsFileError -from test.support import run_unittest, unlink +from test.support import run_unittest +from test.support.os_helper import unlink + class FileUtilTestCase(support.TempdirManager, unittest.TestCase): diff --git a/Lib/distutils/tests/test_filelist.py b/Lib/distutils/tests/test_filelist.py index c71342d0dc4e7..2c26c22617ed4 100644 --- a/Lib/distutils/tests/test_filelist.py +++ b/Lib/distutils/tests/test_filelist.py @@ -9,6 +9,7 @@ from 
distutils import filelist import test.support +from test.support import os_helper from test.support import captured_stdout, run_unittest from distutils.tests import support @@ -295,7 +296,7 @@ def test_process_template(self): class FindAllTestCase(unittest.TestCase): - @test.support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_missing_symlink(self): with test.support.temp_cwd(): os.symlink('foo', 'bar') diff --git a/Lib/distutils/tests/test_register.py b/Lib/distutils/tests/test_register.py index e68b0af3ce0c3..bba48633c9c14 100644 --- a/Lib/distutils/tests/test_register.py +++ b/Lib/distutils/tests/test_register.py @@ -5,7 +5,8 @@ import urllib import warnings -from test.support import check_warnings, run_unittest +from test.support import run_unittest +from test.support.warnings_helper import check_warnings from distutils.command import register as register_module from distutils.command.register import register diff --git a/Lib/distutils/tests/test_sdist.py b/Lib/distutils/tests/test_sdist.py index 23db1269591d6..752e9db5ba5d9 100644 --- a/Lib/distutils/tests/test_sdist.py +++ b/Lib/distutils/tests/test_sdist.py @@ -6,7 +6,8 @@ import zipfile from os.path import join from textwrap import dedent -from test.support import captured_stdout, check_warnings, run_unittest +from test.support import captured_stdout, run_unittest +from test.support.warnings_helper import check_warnings try: import zlib diff --git a/Lib/distutils/tests/test_sysconfig.py b/Lib/distutils/tests/test_sysconfig.py index 236755d095272..59676b0e0b0ce 100644 --- a/Lib/distutils/tests/test_sysconfig.py +++ b/Lib/distutils/tests/test_sysconfig.py @@ -10,7 +10,10 @@ from distutils import sysconfig from distutils.ccompiler import get_default_compiler from distutils.tests import support -from test.support import TESTFN, run_unittest, check_warnings, swap_item +from test.support import run_unittest, swap_item +from test.support.os_helper import TESTFN +from test.support.warnings_helper import check_warnings + class SysconfigTestCase(support.EnvironGuard, unittest.TestCase): def setUp(self): diff --git a/Lib/distutils/tests/test_unixccompiler.py b/Lib/distutils/tests/test_unixccompiler.py index eef702cf01809..eefe4ba40291e 100644 --- a/Lib/distutils/tests/test_unixccompiler.py +++ b/Lib/distutils/tests/test_unixccompiler.py @@ -1,7 +1,8 @@ """Tests for distutils.unixccompiler.""" import sys import unittest -from test.support import EnvironmentVarGuard, run_unittest +from test.support import run_unittest +from test.support.os_helper import EnvironmentVarGuard from distutils import sysconfig from distutils.unixccompiler import UnixCCompiler diff --git a/Lib/test/test_buffer.py b/Lib/test/test_buffer.py index d440bcf7e0faa..468c6ea9def92 100644 --- a/Lib/test/test_buffer.py +++ b/Lib/test/test_buffer.py @@ -16,6 +16,7 @@ import contextlib import unittest from test import support +from test.support import os_helper from itertools import permutations, product from random import randrange, sample, choice import warnings @@ -39,7 +40,7 @@ ctypes = None try: - with support.EnvironmentVarGuard() as os.environ, \ + with os_helper.EnvironmentVarGuard() as os.environ, \ warnings.catch_warnings(): from numpy import ndarray as numpy_array except ImportError: diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py index 566ca27fca893..3dd8c8d1db810 100644 --- a/Lib/test/test_compile.py +++ b/Lib/test/test_compile.py @@ -7,7 +7,9 @@ import tempfile import types from test import support -from test.support import 
script_helper, FakePath +from test.support import script_helper +from test.support.os_helper import FakePath + class TestSpecifics(unittest.TestCase): diff --git a/Lib/test/test_filecmp.py b/Lib/test/test_filecmp.py index b5b24a24c8dde..ca9b4f354a5c0 100644 --- a/Lib/test/test_filecmp.py +++ b/Lib/test/test_filecmp.py @@ -5,13 +5,14 @@ import unittest from test import support +from test.support import os_helper class FileCompareTestCase(unittest.TestCase): def setUp(self): - self.name = support.TESTFN - self.name_same = support.TESTFN + '-same' - self.name_diff = support.TESTFN + '-diff' + self.name = os_helper.TESTFN + self.name_same = os_helper.TESTFN + '-same' + self.name_diff = os_helper.TESTFN + '-diff' data = 'Contents of file go here.\n' for name in [self.name, self.name_same, self.name_diff]: with open(name, 'w') as output: diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py index 014f19e6cbdb1..d5edf74938548 100644 --- a/Lib/test/test_fileinput.py +++ b/Lib/test/test_fileinput.py @@ -24,8 +24,11 @@ from fileinput import FileInput, hook_encoded from pathlib import Path -from test.support import verbose, TESTFN, check_warnings -from test.support import unlink as safe_unlink +from test.support import verbose +from test.support.os_helper import TESTFN +from test.support.os_helper import unlink as safe_unlink +from test.support import os_helper +from test.support import warnings_helper from test import support from unittest import mock @@ -39,7 +42,7 @@ class BaseTests: # temp file's name. def writeTmp(self, content, *, mode='w'): # opening in text mode is the default fd, name = tempfile.mkstemp() - self.addCleanup(support.unlink, name) + self.addCleanup(os_helper.unlink, name) with open(fd, mode) as f: f.write(content) return name @@ -234,9 +237,9 @@ def test_opening_mode(self): pass # try opening in universal newline mode t1 = self.writeTmp(b"A\nB\r\nC\rD", mode="wb") - with check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): fi = FileInput(files=t1, mode="U") - with check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): lines = list(fi) self.assertEqual(lines, ["A\n", "B\n", "C\n", "D"]) @@ -353,7 +356,7 @@ def test_empty_files_list_specified_to_constructor(self): with FileInput(files=[]) as fi: self.assertEqual(fi._files, ('-',)) - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test__getitem__(self): """Tests invoking FileInput.__getitem__() with the current line number""" @@ -371,7 +374,7 @@ def test__getitem___deprecation(self): with FileInput(files=[t]) as fi: self.assertEqual(fi[0], "line1\n") - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test__getitem__invalid_key(self): """Tests invoking FileInput.__getitem__() with an index unequal to the line number""" @@ -381,7 +384,7 @@ def test__getitem__invalid_key(self): fi[1] self.assertEqual(cm.exception.args, ("accessing lines out of order",)) - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test__getitem__eof(self): """Tests invoking FileInput.__getitem__() with the line number but at end-of-input""" @@ -400,7 +403,7 @@ def test_nextfile_oserror_deleting_backup(self): os_unlink_replacement = UnconditionallyRaise(OSError) try: t = self.writeTmp("\n") - self.addCleanup(support.unlink, t + 
'.bak') + self.addCleanup(safe_unlink, t + '.bak') with FileInput(files=[t], inplace=True) as fi: next(fi) # make sure the file is opened os.unlink = os_unlink_replacement diff --git a/Lib/test/test_readline.py b/Lib/test/test_readline.py index 67ee9b7f7cfb1..de573bef9f963 100644 --- a/Lib/test/test_readline.py +++ b/Lib/test/test_readline.py @@ -10,7 +10,9 @@ import sys import tempfile import unittest -from test.support import import_module, unlink, temp_dir, TESTFN, verbose +from test.support import verbose +from test.support.import_helper import import_module +from test.support.os_helper import unlink, temp_dir, TESTFN from test.support.script_helper import assert_python_ok # Skip tests if there is no readline module diff --git a/Lib/test/test_smtpnet.py b/Lib/test/test_smtpnet.py index 74a00a9d7cc58..72f51cd8d81f5 100644 --- a/Lib/test/test_smtpnet.py +++ b/Lib/test/test_smtpnet.py @@ -1,10 +1,11 @@ import unittest from test import support +from test.support import import_helper from test.support import socket_helper import smtplib import socket -ssl = support.import_module("ssl") +ssl = import_helper.import_module("ssl") support.requires("network") diff --git a/Lib/test/test_structmembers.py b/Lib/test/test_structmembers.py index 57ec45f3f92ff..07d2f623f7156 100644 --- a/Lib/test/test_structmembers.py +++ b/Lib/test/test_structmembers.py @@ -1,8 +1,9 @@ import unittest -from test import support +from test.support import import_helper +from test.support import warnings_helper # Skip this test if the _testcapi module isn't available. -support.import_module('_testcapi') +import_helper.import_module('_testcapi') from _testcapi import _test_structmembersType, \ CHAR_MAX, CHAR_MIN, UCHAR_MAX, \ SHRT_MAX, SHRT_MIN, USHRT_MAX, \ @@ -116,27 +117,27 @@ def test_inplace_string(self): class TestWarnings(unittest.TestCase): def test_byte_max(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MAX+1 def test_byte_min(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_BYTE = CHAR_MIN-1 def test_ubyte_max(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_UBYTE = UCHAR_MAX+1 def test_short_max(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MAX+1 def test_short_min(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_SHORT = SHRT_MIN-1 def test_ushort_max(self): - with support.check_warnings(('', RuntimeWarning)): + with warnings_helper.check_warnings(('', RuntimeWarning)): ts.T_USHORT = USHRT_MAX+1 diff --git a/Lib/test/test_tools/__init__.py b/Lib/test/test_tools/__init__.py index eb9acad677d58..61af6578e0953 100644 --- a/Lib/test/test_tools/__init__.py +++ b/Lib/test/test_tools/__init__.py @@ -4,6 +4,7 @@ import os.path import unittest from test import support +from test.support import import_helper basepath = os.path.normpath( os.path.dirname( # @@ -26,11 +27,11 @@ def skip_if_missing(tool=None): @contextlib.contextmanager def imports_under_tool(name, *subdirs): tooldir = os.path.join(toolsdir, name, *subdirs) - with support.DirsOnSysPath(tooldir) as cm: + with import_helper.DirsOnSysPath(tooldir) as cm: yield cm def import_tool(toolname): - with support.DirsOnSysPath(scriptsdir): + with 
import_helper.DirsOnSysPath(scriptsdir): return importlib.import_module(toolname) def load_tests(*args): From webhook-mailer at python.org Thu Jun 25 17:50:47 2020 From: webhook-mailer at python.org (Ram Rachum) Date: Thu, 25 Jun 2020 21:50:47 -0000 Subject: [Python-checkins] Fix typo in functions.rst (GH-21131) Message-ID: https://github.com/python/cpython/commit/77ed29b2c2742b694cac6db5976afc31b58e4803 commit: 77ed29b2c2742b694cac6db5976afc31b58e4803 branch: master author: Ram Rachum committer: GitHub date: 2020-06-25T14:50:37-07:00 summary: Fix typo in functions.rst (GH-21131) files: M Doc/library/functions.rst diff --git a/Doc/library/functions.rst b/Doc/library/functions.rst index 0577de6fbfeeb..f4110c3585a0b 100644 --- a/Doc/library/functions.rst +++ b/Doc/library/functions.rst @@ -1773,7 +1773,7 @@ are always available. They are listed here in alphabetical order. ValueError: zip() argument 2 is longer than argument 1 Without the ``strict=True`` argument, any bug that results in iterables of - different lengths will be silenced, possibly mainfesting as a hard-to-find + different lengths will be silenced, possibly manifesting as a hard-to-find bug in another part of the program. * Shorter iterables can be padded with a constant value to make all the From webhook-mailer at python.org Thu Jun 25 19:07:29 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Thu, 25 Jun 2020 23:07:29 -0000 Subject: [Python-checkins] bpo-41103: Remove old buffer protocol support (#21117) Message-ID: https://github.com/python/cpython/commit/6f8a6ee59cb7f99f68df8ee9c3e8c8cf19af3eed commit: 6f8a6ee59cb7f99f68df8ee9c3e8c8cf19af3eed branch: master author: Inada Naoki committer: GitHub date: 2020-06-26T08:07:22+09:00 summary: bpo-41103: Remove old buffer protocol support (#21117) They are deprecated since Python 3.0. files: A Misc/NEWS.d/next/C API/2020-06-24-22-57-07.bpo-41103.doojgE.rst D Doc/c-api/objbuffer.rst M Doc/c-api/abstract.rst M Doc/data/refcounts.dat M Doc/whatsnew/3.10.rst M Include/abstract.h M Objects/abstract.c M PC/python3dll.c diff --git a/Doc/c-api/abstract.rst b/Doc/c-api/abstract.rst index 1823f9d70c79f..f5df09fa7fd78 100644 --- a/Doc/c-api/abstract.rst +++ b/Doc/c-api/abstract.rst @@ -24,4 +24,3 @@ but whose items have not been set to some non-\ ``NULL`` value yet. mapping.rst iter.rst buffer.rst - objbuffer.rst diff --git a/Doc/c-api/objbuffer.rst b/Doc/c-api/objbuffer.rst deleted file mode 100644 index 6b82a642d7ee4..0000000000000 --- a/Doc/c-api/objbuffer.rst +++ /dev/null @@ -1,55 +0,0 @@ -.. highlight:: c - -Old Buffer Protocol -------------------- - -.. deprecated:: 3.0 - -These functions were part of the "old buffer protocol" API in Python 2. -In Python 3, this protocol doesn't exist anymore but the functions are still -exposed to ease porting 2.x code. They act as a compatibility wrapper -around the :ref:`new buffer protocol `, but they don't give -you control over the lifetime of the resources acquired when a buffer is -exported. - -Therefore, it is recommended that you call :c:func:`PyObject_GetBuffer` -(or the ``y*`` or ``w*`` :ref:`format codes ` with the -:c:func:`PyArg_ParseTuple` family of functions) to get a buffer view over -an object, and :c:func:`PyBuffer_Release` when the buffer view can be released. - - -.. c:function:: int PyObject_AsCharBuffer(PyObject *obj, const char **buffer, Py_ssize_t *buffer_len) - - Returns a pointer to a read-only memory location usable as character-based - input. 
The *obj* argument must support the single-segment character buffer - interface. On success, returns ``0``, sets *buffer* to the memory location - and *buffer_len* to the buffer length. Returns ``-1`` and sets a - :exc:`TypeError` on error. - - -.. c:function:: int PyObject_AsReadBuffer(PyObject *obj, const void **buffer, Py_ssize_t *buffer_len) - - Returns a pointer to a read-only memory location containing arbitrary data. - The *obj* argument must support the single-segment readable buffer - interface. On success, returns ``0``, sets *buffer* to the memory location - and *buffer_len* to the buffer length. Returns ``-1`` and sets a - :exc:`TypeError` on error. - - -.. c:function:: int PyObject_CheckReadBuffer(PyObject *o) - - Returns ``1`` if *o* supports the single-segment readable buffer interface. - Otherwise returns ``0``. This function always succeeds. - - Note that this function tries to get and release a buffer, and exceptions - which occur while calling corresponding functions will get suppressed. - To get error reporting use :c:func:`PyObject_GetBuffer()` instead. - - -.. c:function:: int PyObject_AsWriteBuffer(PyObject *obj, void **buffer, Py_ssize_t *buffer_len) - - Returns a pointer to a writable memory location. The *obj* argument must - support the single-segment, character buffer interface. On success, - returns ``0``, sets *buffer* to the memory location and *buffer_len* to the - buffer length. Returns ``-1`` and sets a :exc:`TypeError` on error. - diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 4dacbe201d22a..1215c96cd5342 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -1568,21 +1568,6 @@ PyOS_FSPath:PyObject*:path:0: PyObject_ASCII:PyObject*::+1: PyObject_ASCII:PyObject*:o:0: -PyObject_AsCharBuffer:int::: -PyObject_AsCharBuffer:PyObject*:obj:0: -PyObject_AsCharBuffer:const char**:buffer:: -PyObject_AsCharBuffer:Py_ssize_t*:buffer_len:: - -PyObject_AsReadBuffer:int::: -PyObject_AsReadBuffer:PyObject*:obj:0: -PyObject_AsReadBuffer:const void**:buffer:: -PyObject_AsReadBuffer:Py_ssize_t*:buffer_len:: - -PyObject_AsWriteBuffer:int::: -PyObject_AsWriteBuffer:PyObject*:obj:0: -PyObject_AsWriteBuffer:void**:buffer:: -PyObject_AsWriteBuffer:Py_ssize_t*:buffer_len:: - PyObject_Bytes:PyObject*::+1: PyObject_Bytes:PyObject*:o:0: @@ -1618,9 +1603,6 @@ PyObject_CallObject:PyObject*:args:0: PyObject_CheckBuffer:int::: PyObject_CheckBuffer:PyObject*:obj:0: -PyObject_CheckReadBuffer:int::: -PyObject_CheckReadBuffer:PyObject*:o:0: - PyObject_DelAttr:int::: PyObject_DelAttr:PyObject*:o:0: PyObject_DelAttr:PyObject*:attr_name:0: diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 89958450200f9..060d5debf91a9 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -204,3 +204,8 @@ Porting to Python 3.10 Removed ------- + +* ``PyObject_AsCharBuffer()``, ``PyObject_AsReadBuffer()``, ``PyObject_CheckReadBuffer()``, + and ``PyObject_AsWriteBuffer()`` are removed. Please migrate to new buffer protocol; + :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release`. + (Contributed by Inada Naoki in :issue:`41103`. 
diff --git a/Include/abstract.h b/Include/abstract.h index bb51c668ac698..a23b7dc78f480 100644 --- a/Include/abstract.h +++ b/Include/abstract.h @@ -309,53 +309,6 @@ PyAPI_FUNC(int) PyObject_DelItemString(PyObject *o, const char *key); PyAPI_FUNC(int) PyObject_DelItem(PyObject *o, PyObject *key); -/* === Old Buffer API ============================================ */ - -/* FIXME: usage of these should all be replaced in Python itself - but for backwards compatibility we will implement them. - Their usage without a corresponding "unlock" mechanism - may create issues (but they would already be there). */ - -/* Takes an arbitrary object which must support the (character, single segment) - buffer interface and returns a pointer to a read-only memory location - useable as character based input for subsequent processing. - - Return 0 on success. buffer and buffer_len are only set in case no error - occurs. Otherwise, -1 is returned and an exception set. */ -Py_DEPRECATED(3.0) -PyAPI_FUNC(int) PyObject_AsCharBuffer(PyObject *obj, - const char **buffer, - Py_ssize_t *buffer_len); - -/* Checks whether an arbitrary object supports the (character, single segment) - buffer interface. - - Returns 1 on success, 0 on failure. */ -Py_DEPRECATED(3.0) PyAPI_FUNC(int) PyObject_CheckReadBuffer(PyObject *obj); - -/* Same as PyObject_AsCharBuffer() except that this API expects (readable, - single segment) buffer interface and returns a pointer to a read-only memory - location which can contain arbitrary data. - - 0 is returned on success. buffer and buffer_len are only set in case no - error occurs. Otherwise, -1 is returned and an exception set. */ -Py_DEPRECATED(3.0) -PyAPI_FUNC(int) PyObject_AsReadBuffer(PyObject *obj, - const void **buffer, - Py_ssize_t *buffer_len); - -/* Takes an arbitrary object which must support the (writable, single segment) - buffer interface and returns a pointer to a writable memory location in - buffer of size 'buffer_len'. - - Return 0 on success. buffer and buffer_len are only set in case no error - occurs. Otherwise, -1 is returned and an exception set. */ -Py_DEPRECATED(3.0) -PyAPI_FUNC(int) PyObject_AsWriteBuffer(PyObject *obj, - void **buffer, - Py_ssize_t *buffer_len); - - /* === New Buffer API ============================================ */ /* Takes an arbitrary object and returns the result of calling diff --git a/Misc/NEWS.d/next/C API/2020-06-24-22-57-07.bpo-41103.doojgE.rst b/Misc/NEWS.d/next/C API/2020-06-24-22-57-07.bpo-41103.doojgE.rst new file mode 100644 index 0000000000000..082b77b9035cb --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-24-22-57-07.bpo-41103.doojgE.rst @@ -0,0 +1,4 @@ +``PyObject_AsCharBuffer()``, ``PyObject_AsReadBuffer()``, +``PyObject_CheckReadBuffer()``, and ``PyObject_AsWriteBuffer()`` are +removed. Please migrate to new buffer protocol; :c:func:`PyObject_GetBuffer` +and :c:func:`PyBuffer_Release`. diff --git a/Objects/abstract.c b/Objects/abstract.c index b9e7111299e2f..0d3f4ac6e1747 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -292,85 +292,6 @@ PyObject_CheckBuffer(PyObject *obj) } -/* We release the buffer right after use of this function which could - cause issues later on. Don't use these functions in new code. 
- */ -int -PyObject_CheckReadBuffer(PyObject *obj) -{ - PyBufferProcs *pb = Py_TYPE(obj)->tp_as_buffer; - Py_buffer view; - - if (pb == NULL || - pb->bf_getbuffer == NULL) - return 0; - if ((*pb->bf_getbuffer)(obj, &view, PyBUF_SIMPLE) == -1) { - PyErr_Clear(); - return 0; - } - PyBuffer_Release(&view); - return 1; -} - -static int -as_read_buffer(PyObject *obj, const void **buffer, Py_ssize_t *buffer_len) -{ - Py_buffer view; - - if (obj == NULL || buffer == NULL || buffer_len == NULL) { - null_error(); - return -1; - } - if (PyObject_GetBuffer(obj, &view, PyBUF_SIMPLE) != 0) - return -1; - - *buffer = view.buf; - *buffer_len = view.len; - PyBuffer_Release(&view); - return 0; -} - -int -PyObject_AsCharBuffer(PyObject *obj, - const char **buffer, - Py_ssize_t *buffer_len) -{ - return as_read_buffer(obj, (const void **)buffer, buffer_len); -} - -int PyObject_AsReadBuffer(PyObject *obj, - const void **buffer, - Py_ssize_t *buffer_len) -{ - return as_read_buffer(obj, buffer, buffer_len); -} - -int PyObject_AsWriteBuffer(PyObject *obj, - void **buffer, - Py_ssize_t *buffer_len) -{ - PyBufferProcs *pb; - Py_buffer view; - - if (obj == NULL || buffer == NULL || buffer_len == NULL) { - null_error(); - return -1; - } - pb = Py_TYPE(obj)->tp_as_buffer; - if (pb == NULL || - pb->bf_getbuffer == NULL || - ((*pb->bf_getbuffer)(obj, &view, PyBUF_WRITABLE) != 0)) { - PyErr_SetString(PyExc_TypeError, - "expected a writable bytes-like object"); - return -1; - } - - *buffer = view.buf; - *buffer_len = view.len; - PyBuffer_Release(&view); - return 0; -} - /* Buffer C-API for Python 3.0 */ int diff --git a/PC/python3dll.c b/PC/python3dll.c index 2f29e83f612a0..f72f2c8af19d2 100644 --- a/PC/python3dll.c +++ b/PC/python3dll.c @@ -392,11 +392,8 @@ EXPORT_FUNC(PyNumber_Subtract) EXPORT_FUNC(PyNumber_ToBase) EXPORT_FUNC(PyNumber_TrueDivide) EXPORT_FUNC(PyNumber_Xor) -EXPORT_FUNC(PyObject_AsCharBuffer) EXPORT_FUNC(PyObject_ASCII) EXPORT_FUNC(PyObject_AsFileDescriptor) -EXPORT_FUNC(PyObject_AsReadBuffer) -EXPORT_FUNC(PyObject_AsWriteBuffer) EXPORT_FUNC(PyObject_Bytes) EXPORT_FUNC(PyObject_Call) EXPORT_FUNC(PyObject_CallFunction) @@ -405,7 +402,6 @@ EXPORT_FUNC(PyObject_CallMethod) EXPORT_FUNC(PyObject_CallMethodObjArgs) EXPORT_FUNC(PyObject_CallObject) EXPORT_FUNC(PyObject_Calloc) -EXPORT_FUNC(PyObject_CheckReadBuffer) EXPORT_FUNC(PyObject_ClearWeakRefs) EXPORT_FUNC(PyObject_DelItem) EXPORT_FUNC(PyObject_DelItemString) From webhook-mailer at python.org Thu Jun 25 19:22:42 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Thu, 25 Jun 2020 23:22:42 -0000 Subject: [Python-checkins] bpo-41119: Output correct error message for list/tuple followed by colon (GH-21160) Message-ID: https://github.com/python/cpython/commit/4b85e60601489f9ee9dd2909e28d89a31566887c commit: 4b85e60601489f9ee9dd2909e28d89a31566887c branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-26T00:22:36+01:00 summary: bpo-41119: Output correct error message for list/tuple followed by colon (GH-21160) files: M Grammar/python.gram M Parser/parser.c diff --git a/Grammar/python.gram b/Grammar/python.gram index c5a5dbe1724f3..652f0db2b175d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -646,18 +646,18 @@ invalid_named_expression: RAISE_SYNTAX_ERROR_KNOWN_LOCATION( a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } invalid_assignment: - | a=list ':' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } - | a=tuple ':' { 
RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } - | a=star_named_expression ',' star_named_expressions* ':' { + | a=list ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } + | a=tuple ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=star_named_expression ',' star_named_expressions* ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } - | a=expression ':' expression ['=' annotated_rhs] { + | a=expression ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "illegal target for annotation") } | (star_targets '=')* a=star_expressions '=' { RAISE_SYNTAX_ERROR_INVALID_TARGET(STAR_TARGETS, a) } | (star_targets '=')* a=yield_expr '=' { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "assignment to yield expression not possible") } | a=star_expressions augassign (yield_expr | star_expressions) { RAISE_SYNTAX_ERROR_KNOWN_LOCATION( - a, + a, "'%s' is an illegal expression for augmented assignment", _PyPegen_get_expr_name(a) )} diff --git a/Parser/parser.c b/Parser/parser.c index 323cd0e0efae3..a235c251fc9ac 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -353,14 +353,14 @@ static KeywordToken *reserved_keywords[] = { #define _gather_124_type 1284 #define _tmp_126_type 1285 #define _loop0_127_type 1286 -#define _tmp_128_type 1287 +#define _loop0_128_type 1287 #define _loop0_129_type 1288 -#define _loop0_130_type 1289 +#define _tmp_130_type 1289 #define _tmp_131_type 1290 -#define _tmp_132_type 1291 -#define _loop0_133_type 1292 -#define _tmp_134_type 1293 -#define _loop0_135_type 1294 +#define _loop0_132_type 1291 +#define _tmp_133_type 1292 +#define _loop0_134_type 1293 +#define _tmp_135_type 1294 #define _tmp_136_type 1295 #define _tmp_137_type 1296 #define _tmp_138_type 1297 @@ -376,11 +376,10 @@ static KeywordToken *reserved_keywords[] = { #define _tmp_148_type 1307 #define _tmp_149_type 1308 #define _tmp_150_type 1309 -#define _tmp_151_type 1310 +#define _loop1_151_type 1310 #define _loop1_152_type 1311 -#define _loop1_153_type 1312 +#define _tmp_153_type 1312 #define _tmp_154_type 1313 -#define _tmp_155_type 1314 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -669,14 +668,14 @@ static asdl_seq *_loop0_125_rule(Parser *p); static asdl_seq *_gather_124_rule(Parser *p); static void *_tmp_126_rule(Parser *p); static asdl_seq *_loop0_127_rule(Parser *p); -static void *_tmp_128_rule(Parser *p); +static asdl_seq *_loop0_128_rule(Parser *p); static asdl_seq *_loop0_129_rule(Parser *p); -static asdl_seq *_loop0_130_rule(Parser *p); +static void *_tmp_130_rule(Parser *p); static void *_tmp_131_rule(Parser *p); -static void *_tmp_132_rule(Parser *p); -static asdl_seq *_loop0_133_rule(Parser *p); -static void *_tmp_134_rule(Parser *p); -static asdl_seq *_loop0_135_rule(Parser *p); +static asdl_seq *_loop0_132_rule(Parser *p); +static void *_tmp_133_rule(Parser *p); +static asdl_seq *_loop0_134_rule(Parser *p); +static void *_tmp_135_rule(Parser *p); static void *_tmp_136_rule(Parser *p); static void *_tmp_137_rule(Parser *p); static void *_tmp_138_rule(Parser *p); @@ -692,11 +691,10 @@ static void *_tmp_147_rule(Parser *p); static void *_tmp_148_rule(Parser *p); static void *_tmp_149_rule(Parser *p); static void *_tmp_150_rule(Parser *p); -static void *_tmp_151_rule(Parser *p); +static asdl_seq *_loop1_151_rule(Parser *p); static asdl_seq *_loop1_152_rule(Parser 
*p); -static asdl_seq *_loop1_153_rule(Parser *p); +static void *_tmp_153_rule(Parser *p); static void *_tmp_154_rule(Parser *p); -static void *_tmp_155_rule(Parser *p); // file: statements? $ @@ -14662,10 +14660,10 @@ invalid_named_expression_rule(Parser *p) } // invalid_assignment: -// | list ':' -// | tuple ':' -// | star_named_expression ',' star_named_expressions* ':' -// | expression ':' expression ['=' annotated_rhs] +// | list ':' expression +// | tuple ':' expression +// | star_named_expression ',' star_named_expressions* ':' expression +// | expression ':' expression // | ((star_targets '='))* star_expressions '=' // | ((star_targets '='))* yield_expr '=' // | star_expressions augassign (yield_expr | star_expressions) @@ -14679,21 +14677,24 @@ invalid_assignment_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // list ':' + { // list ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':'")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':' expression")); Token * _literal; expr_ty a; + expr_ty expression_var; if ( (a = list_rule(p)) // list && (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':'")); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':' expression")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -14704,23 +14705,26 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':'")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':' expression")); } - { // tuple ':' + { // tuple ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':'")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':' expression")); Token * _literal; expr_ty a; + expr_ty expression_var; if ( (a = tuple_rule(p)) // tuple && (_literal = _PyPegen_expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':'")); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':' expression")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -14731,18 +14735,19 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple ':'")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "tuple ':' expression")); } - { // star_named_expression ',' star_named_expressions* ':' + { // star_named_expression ',' star_named_expressions* ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); Token * _literal; Token * _literal_1; asdl_seq * _loop0_127_var; expr_ty a; + expr_ty expression_var; if ( (a = star_named_expression_rule(p)) // star_named_expression && @@ -14751,9 +14756,11 @@ invalid_assignment_rule(Parser *p) (_loop0_127_var = _loop0_127_rule(p)) // star_named_expressions* && (_literal_1 = _PyPegen_expect_token(p, 11)) // token=':' + && + (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -14764,17 +14771,15 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions* ':'")); + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_named_expression ',' star_named_expressions* ':' expression")); } - { // expression ':' expression ['=' annotated_rhs] + { // expression ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); Token * _literal; - void *_opt_var; - UNUSED(_opt_var); // Silence compiler warnings expr_ty a; expr_ty expression_var; if ( @@ -14783,11 +14788,9 @@ invalid_assignment_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (expression_var = expression_rule(p)) // expression - && - (_opt_var = _tmp_128_rule(p), 1) // ['=' annotated_rhs] ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "expression ':' expression")); _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "illegal target for annotation" ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -14798,7 +14801,7 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "expression ':' expression ['=' annotated_rhs]")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "expression ':' expression")); } { // ((star_targets '='))* star_expressions '=' if (p->error_indicator) { @@ -14807,10 +14810,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* star_expressions '='")); Token * _literal; - asdl_seq * _loop0_129_var; + asdl_seq * _loop0_128_var; expr_ty a; if ( - (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* + (_loop0_128_var = _loop0_128_rule(p)) // ((star_targets '='))* && (a = star_expressions_rule(p)) // star_expressions && @@ -14837,10 +14840,10 @@ invalid_assignment_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "((star_targets '='))* yield_expr '='")); Token * _literal; - asdl_seq * _loop0_130_var; + asdl_seq * _loop0_129_var; expr_ty a; if ( - (_loop0_130_var = _loop0_130_rule(p)) // ((star_targets '='))* + (_loop0_129_var = _loop0_129_rule(p)) // ((star_targets '='))* && (a = yield_expr_rule(p)) // yield_expr && @@ -14866,7 +14869,7 @@ invalid_assignment_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); - void *_tmp_131_var; + void *_tmp_130_var; expr_ty a; AugOperator* augassign_var; if ( @@ -14874,7 +14877,7 @@ invalid_assignment_rule(Parser *p) && (augassign_var = augassign_rule(p)) // augassign && - (_tmp_131_var = _tmp_131_rule(p)) // yield_expr | star_expressions + (_tmp_130_var = _tmp_130_rule(p)) // yield_expr | star_expressions ) { D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions augassign (yield_expr | star_expressions)")); @@ -15000,11 +15003,11 @@ invalid_comprehension_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_comprehension[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('[' | '(' | '{') starred_expression for_if_clauses")); - void *_tmp_132_var; + void *_tmp_131_var; expr_ty a; asdl_seq* for_if_clauses_var; if ( - (_tmp_132_var = _tmp_132_rule(p)) // '[' | '(' | '{' + (_tmp_131_var = _tmp_131_rule(p)) // '[' | '(' | '{' && (a = starred_expression_rule(p)) // starred_expression && @@ -15101,13 +15104,13 @@ invalid_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default* (slash_with_default | param_with_default+) param_no_default")); - asdl_seq * _loop0_133_var; - void *_tmp_134_var; + asdl_seq * _loop0_132_var; + void *_tmp_133_var; arg_ty param_no_default_var; if ( - (_loop0_133_var = _loop0_133_rule(p)) // param_no_default* + (_loop0_132_var = _loop0_132_rule(p)) // param_no_default* && - (_tmp_134_var = _tmp_134_rule(p)) // slash_with_default | param_with_default+ + (_tmp_133_var = _tmp_133_rule(p)) // slash_with_default | param_with_default+ && (param_no_default_var = param_no_default_rule(p)) // param_no_default ) @@ -15149,13 +15152,13 @@ invalid_lambda_parameters_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> invalid_lambda_parameters[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default* (lambda_slash_with_default | lambda_param_with_default+) lambda_param_no_default")); - asdl_seq * _loop0_135_var; - void *_tmp_136_var; + asdl_seq * _loop0_134_var; + void *_tmp_135_var; arg_ty lambda_param_no_default_var; if ( - (_loop0_135_var = _loop0_135_rule(p)) // lambda_param_no_default* + 
(_loop0_134_var = _loop0_134_rule(p)) // lambda_param_no_default* && - (_tmp_136_var = _tmp_136_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ + (_tmp_135_var = _tmp_135_rule(p)) // lambda_slash_with_default | lambda_param_with_default+ && (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default ) @@ -15197,11 +15200,11 @@ invalid_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); Token * _literal; - void *_tmp_137_var; + void *_tmp_136_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_137_var = _tmp_137_rule(p)) // ')' | ',' (')' | '**') + (_tmp_136_var = _tmp_136_rule(p)) // ')' | ',' (')' | '**') ) { D(fprintf(stderr, "%*c+ invalid_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (')' | ',' (')' | '**'))")); @@ -15271,11 +15274,11 @@ invalid_lambda_star_etc_rule(Parser *p) } D(fprintf(stderr, "%*c> invalid_lambda_star_etc[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); Token * _literal; - void *_tmp_138_var; + void *_tmp_137_var; if ( (_literal = _PyPegen_expect_token(p, 16)) // token='*' && - (_tmp_138_var = _tmp_138_rule(p)) // ':' | ',' (':' | '**') + (_tmp_137_var = _tmp_137_rule(p)) // ':' | ',' (':' | '**') ) { D(fprintf(stderr, "%*c+ invalid_lambda_star_etc[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'*' (':' | ',' (':' | '**'))")); @@ -16784,12 +16787,12 @@ _loop1_22_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_22[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_139_var; + void *_tmp_138_var; while ( - (_tmp_139_var = _tmp_139_rule(p)) // star_targets '=' + (_tmp_138_var = _tmp_138_rule(p)) // star_targets '=' ) { - _res = _tmp_139_var; + _res = _tmp_138_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17292,12 +17295,12 @@ _loop0_31_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_31[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_140_var; + void *_tmp_139_var; while ( - (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' + (_tmp_139_var = _tmp_139_rule(p)) // '.' | '...' ) { - _res = _tmp_140_var; + _res = _tmp_139_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -17358,12 +17361,12 @@ _loop1_32_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_32[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('.' | '...')")); - void *_tmp_141_var; + void *_tmp_140_var; while ( - (_tmp_141_var = _tmp_141_rule(p)) // '.' | '...' + (_tmp_140_var = _tmp_140_rule(p)) // '.' | '...' 
) { - _res = _tmp_141_var; + _res = _tmp_140_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19520,12 +19523,12 @@ _loop1_68_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_68[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('@' named_expression NEWLINE)")); - void *_tmp_142_var; + void *_tmp_141_var; while ( - (_tmp_142_var = _tmp_142_rule(p)) // '@' named_expression NEWLINE + (_tmp_141_var = _tmp_141_rule(p)) // '@' named_expression NEWLINE ) { - _res = _tmp_142_var; + _res = _tmp_141_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19752,12 +19755,12 @@ _loop1_72_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_72[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_expression)")); - void *_tmp_143_var; + void *_tmp_142_var; while ( - (_tmp_143_var = _tmp_143_rule(p)) // ',' star_expression + (_tmp_142_var = _tmp_142_rule(p)) // ',' star_expression ) { - _res = _tmp_143_var; + _res = _tmp_142_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -19937,12 +19940,12 @@ _loop1_75_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_75[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' expression)")); - void *_tmp_144_var; + void *_tmp_143_var; while ( - (_tmp_144_var = _tmp_144_rule(p)) // ',' expression + (_tmp_143_var = _tmp_143_rule(p)) // ',' expression ) { - _res = _tmp_144_var; + _res = _tmp_143_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -20967,12 +20970,12 @@ _loop1_90_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_90[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('or' conjunction)")); - void *_tmp_145_var; + void *_tmp_144_var; while ( - (_tmp_145_var = _tmp_145_rule(p)) // 'or' conjunction + (_tmp_144_var = _tmp_144_rule(p)) // 'or' conjunction ) { - _res = _tmp_145_var; + _res = _tmp_144_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21038,12 +21041,12 @@ _loop1_91_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop1_91[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('and' inversion)")); - void *_tmp_146_var; + void *_tmp_145_var; while ( - (_tmp_146_var = _tmp_146_rule(p)) // 'and' inversion + (_tmp_145_var = _tmp_145_rule(p)) // 'and' inversion ) { - _res = _tmp_146_var; + _res = _tmp_145_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -21959,12 +21962,12 @@ _loop0_106_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_106[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' disjunction)")); - void *_tmp_147_var; + void *_tmp_146_var; while ( - (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction + (_tmp_146_var = _tmp_146_rule(p)) // 'if' disjunction ) { - _res = _tmp_147_var; + _res = _tmp_146_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22025,12 +22028,12 @@ _loop0_107_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_107[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "('if' 
disjunction)")); - void *_tmp_148_var; + void *_tmp_147_var; while ( - (_tmp_148_var = _tmp_148_rule(p)) // 'if' disjunction + (_tmp_147_var = _tmp_147_rule(p)) // 'if' disjunction ) { - _res = _tmp_148_var; + _res = _tmp_147_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -22635,12 +22638,12 @@ _loop0_118_rule(Parser *p) return NULL; } D(fprintf(stderr, "%*c> _loop0_118[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(',' star_target)")); - void *_tmp_149_var; + void *_tmp_148_var; while ( - (_tmp_149_var = _tmp_149_rule(p)) // ',' star_target + (_tmp_148_var = _tmp_148_rule(p)) // ',' star_target ) { - _res = _tmp_149_var; + _res = _tmp_148_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -23178,48 +23181,9 @@ _loop0_127_rule(Parser *p) return _seq; } -// _tmp_128: '=' annotated_rhs -static void * -_tmp_128_rule(Parser *p) -{ - D(p->level++); - if (p->error_indicator) { - D(p->level--); - return NULL; - } - void * _res = NULL; - int _mark = p->mark; - { // '=' annotated_rhs - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> _tmp_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - Token * _literal; - expr_ty annotated_rhs_var; - if ( - (_literal = _PyPegen_expect_token(p, 22)) // token='=' - && - (annotated_rhs_var = annotated_rhs_rule(p)) // annotated_rhs - ) - { - D(fprintf(stderr, "%*c+ _tmp_128[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'=' annotated_rhs")); - _res = _PyPegen_dummy_name(p, _literal, annotated_rhs_var); - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_128[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'=' annotated_rhs")); - } - _res = NULL; - done: - D(p->level--); - return _res; -} - -// _loop0_129: (star_targets '=') +// _loop0_128: (star_targets '=') static asdl_seq * -_loop0_129_rule(Parser *p) +_loop0_128_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23243,13 +23207,13 @@ _loop0_129_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_150_var; + D(fprintf(stderr, "%*c> _loop0_128[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_149_var; while ( - (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' + (_tmp_149_var = _tmp_149_rule(p)) // star_targets '=' ) { - _res = _tmp_150_var; + _res = _tmp_149_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -23265,7 +23229,7 @@ _loop0_129_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_128[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23278,14 +23242,14 @@ _loop0_129_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_128_type, _seq); D(p->level--); return _seq; } -// _loop0_130: (star_targets '=') +// _loop0_129: (star_targets '=') static asdl_seq * -_loop0_130_rule(Parser *p) +_loop0_129_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23309,13 +23273,13 @@ _loop0_130_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); - void *_tmp_151_var; + D(fprintf(stderr, "%*c> _loop0_129[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "(star_targets '=')")); + void *_tmp_150_var; while ( - (_tmp_151_var = _tmp_151_rule(p)) // star_targets '=' + (_tmp_150_var = _tmp_150_rule(p)) // star_targets '=' ) { - _res = _tmp_151_var; + _res = _tmp_150_var; if (_n == _children_capacity) { _children_capacity *= 2; void **_new_children = PyMem_Realloc(_children, _children_capacity*sizeof(void *)); @@ -23331,7 +23295,7 @@ _loop0_130_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_130[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_129[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "(star_targets '=')")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23344,14 +23308,14 @@ _loop0_130_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_130_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_129_type, _seq); D(p->level--); return _seq; } -// _tmp_131: yield_expr | star_expressions +// _tmp_130: yield_expr | star_expressions static void * -_tmp_131_rule(Parser *p) +_tmp_130_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23365,18 +23329,18 @@ _tmp_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "yield_expr")); expr_ty yield_expr_var; if ( (yield_expr_var = yield_expr_rule(p)) // yield_expr ) { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "yield_expr")); _res = yield_expr_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "yield_expr")); } { // star_expressions @@ -23384,18 +23348,18 @@ _tmp_131_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c> _tmp_130[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_expressions")); expr_ty star_expressions_var; if ( (star_expressions_var = star_expressions_rule(p)) // star_expressions ) { - D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); + D(fprintf(stderr, "%*c+ _tmp_130[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_expressions")); _res = star_expressions_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_130[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_expressions")); } _res = NULL; @@ -23404,9 +23368,9 @@ _tmp_131_rule(Parser *p) return _res; } -// _tmp_132: '[' | '(' | '{' +// _tmp_131: '[' | '(' | '{' static void * -_tmp_132_rule(Parser *p) +_tmp_131_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23420,18 +23384,18 @@ _tmp_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'['")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 9)) // token='[' ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'['")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'['")); } { // '(' @@ -23439,18 +23403,18 @@ _tmp_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'('")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 7)) // token='(' ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'('")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'('")); } { // '{' @@ -23458,18 +23422,18 @@ _tmp_132_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c> _tmp_131[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'{'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 25)) // token='{' ) { - D(fprintf(stderr, "%*c+ _tmp_132[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); + D(fprintf(stderr, "%*c+ _tmp_131[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'{'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_132[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_131[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'{'")); } _res = NULL; @@ -23478,9 +23442,9 @@ _tmp_132_rule(Parser *p) return _res; } -// _loop0_133: param_no_default +// _loop0_132: param_no_default static asdl_seq * -_loop0_133_rule(Parser *p) +_loop0_132_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23504,7 +23468,7 @@ _loop0_133_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); + D(fprintf(stderr, "%*c> _loop0_132[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_no_default")); arg_ty param_no_default_var; while ( (param_no_default_var = param_no_default_rule(p)) // param_no_default @@ -23526,7 +23490,7 @@ _loop0_133_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_133[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_132[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23539,14 +23503,14 @@ _loop0_133_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_133_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_132_type, _seq); D(p->level--); return _seq; } -// _tmp_134: slash_with_default | param_with_default+ +// _tmp_133: slash_with_default | param_with_default+ static void * -_tmp_134_rule(Parser *p) +_tmp_133_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23560,18 +23524,18 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "slash_with_default")); SlashWithDefault* slash_with_default_var; if ( (slash_with_default_var = slash_with_default_rule(p)) // slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "slash_with_default")); _res = slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "slash_with_default")); } { // param_with_default+ @@ -23579,18 +23543,18 @@ _tmp_134_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - asdl_seq * _loop1_152_var; + D(fprintf(stderr, "%*c> _tmp_133[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + asdl_seq * _loop1_151_var; if ( - (_loop1_152_var = _loop1_152_rule(p)) // param_with_default+ + (_loop1_151_var = _loop1_151_rule(p)) // param_with_default+ ) { - D(fprintf(stderr, "%*c+ _tmp_134[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); - _res = _loop1_152_var; + D(fprintf(stderr, "%*c+ _tmp_133[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "param_with_default+")); + _res = _loop1_151_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_134[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_133[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "param_with_default+")); } _res = NULL; @@ -23599,9 +23563,9 @@ _tmp_134_rule(Parser *p) return _res; } -// _loop0_135: lambda_param_no_default +// _loop0_134: lambda_param_no_default static asdl_seq * -_loop0_135_rule(Parser *p) +_loop0_134_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23625,7 +23589,7 @@ _loop0_135_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop0_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); + D(fprintf(stderr, "%*c> _loop0_134[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_no_default")); arg_ty lambda_param_no_default_var; while ( (lambda_param_no_default_var = lambda_param_no_default_rule(p)) // lambda_param_no_default @@ -23647,7 +23611,7 @@ _loop0_135_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop0_135[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop0_134[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_no_default")); } asdl_seq *_seq = _Py_asdl_seq_new(_n, p->arena); @@ -23660,14 +23624,14 @@ _loop0_135_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop0_135_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop0_134_type, _seq); D(p->level--); return _seq; } -// _tmp_136: lambda_slash_with_default | lambda_param_with_default+ +// _tmp_135: lambda_slash_with_default | lambda_param_with_default+ static void * -_tmp_136_rule(Parser *p) +_tmp_135_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23681,18 +23645,18 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); SlashWithDefault* lambda_slash_with_default_var; if ( (lambda_slash_with_default_var = lambda_slash_with_default_rule(p)) // lambda_slash_with_default ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_slash_with_default")); _res = lambda_slash_with_default_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_slash_with_default")); } { // lambda_param_with_default+ @@ -23700,18 +23664,18 @@ _tmp_136_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - asdl_seq * _loop1_153_var; + D(fprintf(stderr, "%*c> _tmp_135[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + asdl_seq * _loop1_152_var; if ( - (_loop1_153_var = _loop1_153_rule(p)) // lambda_param_with_default+ + (_loop1_152_var = _loop1_152_rule(p)) // lambda_param_with_default+ ) { - D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); - _res = _loop1_153_var; + D(fprintf(stderr, "%*c+ _tmp_135[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default+")); + _res = _loop1_152_var; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_135[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "lambda_param_with_default+")); } _res = NULL; @@ -23720,9 +23684,9 @@ _tmp_136_rule(Parser *p) return _res; } -// _tmp_137: ')' | ',' (')' | '**') +// _tmp_136: ')' | ',' (')' | '**') static void * -_tmp_137_rule(Parser *p) +_tmp_136_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23736,18 +23700,18 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // ',' (')' | '**') @@ -23755,21 +23719,21 @@ _tmp_137_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + D(fprintf(stderr, "%*c> _tmp_136[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); Token * _literal; - void *_tmp_154_var; + void *_tmp_153_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_154_var = _tmp_154_rule(p)) // ')' | '**' + (_tmp_153_var = _tmp_153_rule(p)) // ')' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); + D(fprintf(stderr, "%*c+ _tmp_136[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (')' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_153_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_136[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (')' | '**')")); } _res = NULL; @@ -23778,9 +23742,9 @@ _tmp_137_rule(Parser *p) return _res; } -// _tmp_138: ':' | ',' (':' | '**') +// _tmp_137: ':' | ',' (':' | '**') static void * -_tmp_138_rule(Parser *p) +_tmp_137_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23794,18 +23758,18 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "':'")); } { // ',' (':' | '**') @@ -23813,21 +23777,21 @@ _tmp_138_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + D(fprintf(stderr, "%*c> _tmp_137[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); Token * _literal; - void *_tmp_155_var; + void *_tmp_154_var; if ( (_literal = _PyPegen_expect_token(p, 12)) // token=',' && - (_tmp_155_var = _tmp_155_rule(p)) // ':' | '**' + (_tmp_154_var = _tmp_154_rule(p)) // ':' | '**' ) { - D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); - _res = _PyPegen_dummy_name(p, _literal, _tmp_155_var); + D(fprintf(stderr, "%*c+ _tmp_137[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' (':' | '**')")); + _res = _PyPegen_dummy_name(p, _literal, _tmp_154_var); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_137[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' (':' | '**')")); } _res = NULL; @@ -23836,9 +23800,9 @@ _tmp_138_rule(Parser *p) return _res; } -// _tmp_139: star_targets '=' +// _tmp_138: star_targets '=' static void * -_tmp_139_rule(Parser *p) +_tmp_138_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23852,7 +23816,7 @@ _tmp_139_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_138[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty z; if ( @@ -23861,7 +23825,7 @@ _tmp_139_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_138[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -23871,7 +23835,7 @@ _tmp_139_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_138[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -23880,9 +23844,9 @@ _tmp_139_rule(Parser *p) return _res; } -// _tmp_140: '.' | '...' +// _tmp_139: '.' | '...' static void * -_tmp_140_rule(Parser *p) +_tmp_139_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23896,18 +23860,18 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' 
@@ -23915,18 +23879,18 @@ _tmp_140_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_139[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_139[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_139[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23935,9 +23899,9 @@ _tmp_140_rule(Parser *p) return _res; } -// _tmp_141: '.' | '...' +// _tmp_140: '.' | '...' static void * -_tmp_141_rule(Parser *p) +_tmp_140_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -23951,18 +23915,18 @@ _tmp_141_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'.'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 23)) // token='.' ) { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'.'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'.'")); } { // '...' @@ -23970,18 +23934,18 @@ _tmp_141_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c> _tmp_140[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'...'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 52)) // token='...' ) { - D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); + D(fprintf(stderr, "%*c+ _tmp_140[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'...'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_140[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'...'")); } _res = NULL; @@ -23990,9 +23954,9 @@ _tmp_141_rule(Parser *p) return _res; } -// _tmp_142: '@' named_expression NEWLINE +// _tmp_141: '@' named_expression NEWLINE static void * -_tmp_142_rule(Parser *p) +_tmp_141_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24006,7 +23970,7 @@ _tmp_142_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c> _tmp_141[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); Token * _literal; expr_ty f; Token * newline_var; @@ -24018,7 +23982,7 @@ _tmp_142_rule(Parser *p) (newline_var = _PyPegen_expect_token(p, NEWLINE)) // token='NEWLINE' ) { - D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); + D(fprintf(stderr, "%*c+ _tmp_141[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'@' named_expression NEWLINE")); _res = f; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24028,7 +23992,7 @@ _tmp_142_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_141[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'@' named_expression NEWLINE")); } _res = NULL; @@ -24037,9 +24001,9 @@ _tmp_142_rule(Parser *p) return _res; } -// _tmp_143: ',' star_expression +// _tmp_142: ',' star_expression static void * -_tmp_143_rule(Parser *p) +_tmp_142_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24053,7 +24017,7 @@ _tmp_143_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c> _tmp_142[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_expression")); Token * _literal; expr_ty c; if ( @@ -24062,7 +24026,7 @@ _tmp_143_rule(Parser *p) (c = star_expression_rule(p)) // star_expression ) { - D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); + D(fprintf(stderr, "%*c+ _tmp_142[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24072,7 +24036,7 @@ _tmp_143_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_142[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_expression")); } _res = NULL; @@ -24081,9 +24045,9 @@ _tmp_143_rule(Parser *p) return _res; } -// _tmp_144: ',' expression +// _tmp_143: ',' expression static void * -_tmp_144_rule(Parser *p) +_tmp_143_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24097,7 +24061,7 @@ _tmp_144_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c> _tmp_143[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' expression")); Token * _literal; expr_ty c; if ( @@ -24106,7 +24070,7 @@ _tmp_144_rule(Parser *p) (c = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); + D(fprintf(stderr, "%*c+ _tmp_143[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' expression")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24116,7 +24080,7 @@ _tmp_144_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_143[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "',' expression")); } _res = NULL; @@ -24125,9 +24089,9 @@ _tmp_144_rule(Parser *p) return _res; } -// _tmp_145: 'or' conjunction +// _tmp_144: 'or' conjunction static void * -_tmp_145_rule(Parser *p) +_tmp_144_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24141,7 +24105,7 @@ _tmp_145_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c> _tmp_144[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); Token * _keyword; expr_ty c; if ( @@ -24150,7 +24114,7 @@ _tmp_145_rule(Parser *p) (c = conjunction_rule(p)) // conjunction ) { - D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); + D(fprintf(stderr, "%*c+ _tmp_144[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'or' conjunction")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24160,7 +24124,7 @@ _tmp_145_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_144[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'or' conjunction")); } _res = NULL; @@ -24169,9 +24133,9 @@ _tmp_145_rule(Parser *p) return _res; } -// _tmp_146: 'and' inversion +// _tmp_145: 'and' inversion static void * -_tmp_146_rule(Parser *p) +_tmp_145_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24185,7 +24149,7 @@ _tmp_146_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c> _tmp_145[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'and' inversion")); Token * _keyword; expr_ty c; if ( @@ -24194,7 +24158,7 @@ _tmp_146_rule(Parser *p) (c = inversion_rule(p)) // inversion ) { - D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); + D(fprintf(stderr, "%*c+ _tmp_145[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'and' inversion")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24204,7 +24168,7 @@ _tmp_146_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_145[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'and' inversion")); } _res = NULL; @@ -24213,9 +24177,9 @@ _tmp_146_rule(Parser *p) return _res; } -// _tmp_147: 'if' disjunction +// _tmp_146: 'if' disjunction static void * -_tmp_147_rule(Parser *p) +_tmp_146_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24229,7 +24193,7 @@ _tmp_147_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_146[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -24238,7 +24202,7 @@ _tmp_147_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_146[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24248,7 +24212,7 @@ _tmp_147_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_146[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -24257,9 +24221,9 @@ _tmp_147_rule(Parser *p) return _res; } -// _tmp_148: 'if' disjunction +// _tmp_147: 'if' disjunction static void * -_tmp_148_rule(Parser *p) +_tmp_147_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24273,7 +24237,7 @@ _tmp_148_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c> _tmp_147[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); Token * _keyword; expr_ty z; if ( @@ -24282,7 +24246,7 @@ _tmp_148_rule(Parser *p) (z = disjunction_rule(p)) // disjunction ) { - D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); + D(fprintf(stderr, "%*c+ _tmp_147[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'if' disjunction")); _res = z; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24292,7 +24256,7 @@ _tmp_148_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_147[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'if' disjunction")); } _res = NULL; @@ -24301,9 +24265,9 @@ _tmp_148_rule(Parser *p) return _res; } -// _tmp_149: ',' star_target +// _tmp_148: ',' star_target static void * -_tmp_149_rule(Parser *p) +_tmp_148_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24317,7 +24281,7 @@ _tmp_149_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c> _tmp_148[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "',' star_target")); Token * _literal; expr_ty c; if ( @@ -24326,7 +24290,7 @@ _tmp_149_rule(Parser *p) (c = star_target_rule(p)) // star_target ) { - D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); + D(fprintf(stderr, "%*c+ _tmp_148[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "',' star_target")); _res = c; if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; @@ -24336,7 +24300,7 @@ _tmp_149_rule(Parser *p) goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_148[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "',' star_target")); } _res = NULL; @@ -24345,9 +24309,9 @@ _tmp_149_rule(Parser *p) return _res; } -// _tmp_150: star_targets '=' +// _tmp_149: star_targets '=' static void * -_tmp_150_rule(Parser *p) +_tmp_149_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24361,7 +24325,7 @@ _tmp_150_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_149[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -24370,12 +24334,12 @@ _tmp_150_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_149[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_149[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -24384,9 +24348,9 @@ _tmp_150_rule(Parser *p) return _res; } -// _tmp_151: star_targets '=' +// _tmp_150: star_targets '=' static void * -_tmp_151_rule(Parser *p) +_tmp_150_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24400,7 +24364,7 @@ _tmp_151_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c> _tmp_150[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "star_targets '='")); Token * _literal; expr_ty star_targets_var; if ( @@ -24409,12 +24373,12 @@ _tmp_151_rule(Parser *p) (_literal = _PyPegen_expect_token(p, 22)) // token='=' ) { - D(fprintf(stderr, "%*c+ _tmp_151[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); + D(fprintf(stderr, "%*c+ _tmp_150[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "star_targets '='")); _res = _PyPegen_dummy_name(p, star_targets_var, _literal); goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_151[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_150[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "star_targets '='")); } _res = NULL; @@ -24423,9 +24387,9 @@ _tmp_151_rule(Parser *p) return _res; } -// _loop1_152: param_with_default +// _loop1_151: param_with_default static asdl_seq * -_loop1_152_rule(Parser *p) +_loop1_151_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24449,7 +24413,7 @@ _loop1_152_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); + D(fprintf(stderr, "%*c> _loop1_151[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "param_with_default")); NameDefaultPair* param_with_default_var; while ( (param_with_default_var = param_with_default_rule(p)) // param_with_default @@ -24471,7 +24435,7 @@ _loop1_152_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_151[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -24489,14 +24453,14 @@ _loop1_152_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_151_type, _seq); D(p->level--); return _seq; } -// _loop1_153: lambda_param_with_default +// _loop1_152: lambda_param_with_default static asdl_seq * -_loop1_153_rule(Parser *p) +_loop1_152_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24520,7 +24484,7 @@ _loop1_153_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _loop1_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); + D(fprintf(stderr, "%*c> _loop1_152[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "lambda_param_with_default")); NameDefaultPair* lambda_param_with_default_var; while ( (lambda_param_with_default_var = lambda_param_with_default_rule(p)) // lambda_param_with_default @@ -24542,7 +24506,7 @@ _loop1_153_rule(Parser *p) _mark = p->mark; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _loop1_153[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _loop1_152[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "lambda_param_with_default")); } if (_n == 0 || p->error_indicator) { @@ -24560,14 +24524,14 @@ _loop1_153_rule(Parser *p) } for (int i = 0; i < _n; i++) asdl_seq_SET(_seq, i, _children[i]); PyMem_Free(_children); - _PyPegen_insert_memo(p, _start_mark, _loop1_153_type, _seq); + _PyPegen_insert_memo(p, _start_mark, _loop1_152_type, _seq); D(p->level--); return _seq; } -// _tmp_154: ')' | '**' +// _tmp_153: ')' | '**' static void * -_tmp_154_rule(Parser *p) +_tmp_153_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24581,18 +24545,18 @@ _tmp_154_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "')'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 8)) // token=')' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "')'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "')'")); } { // '**' @@ -24600,18 +24564,18 @@ _tmp_154_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_153[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_153[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_153[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'**'")); } _res = NULL; @@ -24620,9 +24584,9 @@ _tmp_154_rule(Parser *p) return _res; } -// _tmp_155: ':' | '**' +// _tmp_154: ':' | '**' static void * -_tmp_155_rule(Parser *p) +_tmp_154_rule(Parser *p) { D(p->level++); if (p->error_indicator) { @@ -24636,18 +24600,18 @@ _tmp_155_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "':'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 11)) // token=':' ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "':'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "':'")); } { // '**' @@ -24655,18 +24619,18 @@ _tmp_155_rule(Parser *p) D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> _tmp_155[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c> _tmp_154[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'**'")); Token * _literal; if ( (_literal = _PyPegen_expect_token(p, 35)) // token='**' ) { - D(fprintf(stderr, "%*c+ _tmp_155[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); + D(fprintf(stderr, "%*c+ _tmp_154[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'**'")); _res = _literal; goto done; } p->mark = _mark; - D(fprintf(stderr, "%*c%s _tmp_155[%d-%d]: %s failed!\n", p->level, ' ', + D(fprintf(stderr, "%*c%s _tmp_154[%d-%d]: %s failed!\n", p->level, ' ', p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "'**'")); } _res = NULL; From webhook-mailer at python.org Thu Jun 25 20:03:58 2020 From: webhook-mailer at python.org (Raymond Hettinger) Date: Fri, 26 Jun 2020 00:03:58 -0000 Subject: [Python-checkins] Improve code organization for the random module (GH-21161) Message-ID: https://github.com/python/cpython/commit/ef19bad7d6da99575d66c1f5dc8fd6ac57e92f6e commit: ef19bad7d6da99575d66c1f5dc8fd6ac57e92f6e branch: master author: Raymond Hettinger committer: GitHub date: 2020-06-25T17:03:50-07:00 summary: Improve code organization for the random module (GH-21161) files: M Lib/random.py diff --git a/Lib/random.py b/Lib/random.py index ae7b5cf4e72e8..a6454f520df0a 100644 --- a/Lib/random.py +++ b/Lib/random.py @@ -1,5 +1,9 @@ """Random variable generators. + bytes + ----- + uniform bytes (values between 0 and 255) + integers -------- uniform within range @@ -37,6 +41,10 @@ """ +# Translated by Guido van Rossum from C source provided by +# Adrian Baddeley. Adapted by Raymond Hettinger for use with +# the Mersenne Twister and os.urandom() core generators. 
+ from warnings import warn as _warn from math import log as _log, exp as _exp, pi as _pi, e as _e, ceil as _ceil from math import sqrt as _sqrt, acos as _acos, cos as _cos, sin as _sin @@ -46,6 +54,7 @@ from itertools import accumulate as _accumulate, repeat as _repeat from bisect import bisect as _bisect import os as _os +import _random try: # hashlib is pretty heavy to load, try lean internal module first @@ -54,7 +63,6 @@ # fallback to official implementation from hashlib import sha512 as _sha512 - __all__ = [ "Random", "SystemRandom", @@ -89,13 +97,6 @@ RECIP_BPF = 2 ** -BPF -# Translated by Guido van Rossum from C source provided by -# Adrian Baddeley. Adapted by Raymond Hettinger for use with -# the Mersenne Twister and os.urandom() core generators. - -import _random - - class Random(_random.Random): """Random number generator base class used by bound module functions. @@ -121,26 +122,6 @@ def __init__(self, x=None): self.seed(x) self.gauss_next = None - def __init_subclass__(cls, /, **kwargs): - """Control how subclasses generate random integers. - - The algorithm a subclass can use depends on the random() and/or - getrandbits() implementation available to it and determines - whether it can generate random integers from arbitrarily large - ranges. - """ - - for c in cls.__mro__: - if '_randbelow' in c.__dict__: - # just inherit it - break - if 'getrandbits' in c.__dict__: - cls._randbelow = cls._randbelow_with_getrandbits - break - if 'random' in c.__dict__: - cls._randbelow = cls._randbelow_without_getrandbits - break - def seed(self, a=None, version=2): """Initialize internal state from a seed. @@ -210,14 +191,11 @@ def setstate(self, state): "Random.setstate() of version %s" % (version, self.VERSION)) - ## ---- Methods below this point do not need to be overridden when - ## ---- subclassing for the purpose of using a different core generator. - ## -------------------- bytes methods --------------------- + ## ------------------------------------------------------- + ## ---- Methods below this point do not need to be overridden or extended + ## ---- when subclassing for the purpose of using a different core generator. - def randbytes(self, n): - """Generate n random bytes.""" - return self.getrandbits(n * 8).to_bytes(n, 'little') ## -------------------- pickle support ------------------- @@ -233,6 +211,80 @@ def __setstate__(self, state): # for pickle def __reduce__(self): return self.__class__, (), self.getstate() + + ## ---- internal support method for evenly distributed integers ---- + + def __init_subclass__(cls, /, **kwargs): + """Control how subclasses generate random integers. + + The algorithm a subclass can use depends on the random() and/or + getrandbits() implementation available to it and determines + whether it can generate random integers from arbitrarily large + ranges. + """ + + for c in cls.__mro__: + if '_randbelow' in c.__dict__: + # just inherit it + break + if 'getrandbits' in c.__dict__: + cls._randbelow = cls._randbelow_with_getrandbits + break + if 'random' in c.__dict__: + cls._randbelow = cls._randbelow_without_getrandbits + break + + def _randbelow_with_getrandbits(self, n): + "Return a random int in the range [0,n). Returns 0 if n==0." 
+ + if not n: + return 0 + getrandbits = self.getrandbits + k = n.bit_length() # don't use (n-1) here because n can be 1 + r = getrandbits(k) # 0 <= r < 2**k + while r >= n: + r = getrandbits(k) + return r + + def _randbelow_without_getrandbits(self, n, maxsize=1<= maxsize: + _warn("Underlying random() generator does not supply \n" + "enough bits to choose from a population range this large.\n" + "To remove the range limitation, add a getrandbits() method.") + return _floor(random() * n) + if n == 0: + return 0 + rem = maxsize % n + limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0 + r = random() + while r >= limit: + r = random() + return _floor(r * maxsize) % n + + _randbelow = _randbelow_with_getrandbits + + + ## -------------------------------------------------------- + ## ---- Methods below this point generate custom distributions + ## ---- based on the methods defined above. They do not + ## ---- directly touch the underlying generator and only + ## ---- access randomness through the methods: random(), + ## ---- getrandbits(), or _randbelow(). + + + ## -------------------- bytes methods --------------------- + + def randbytes(self, n): + """Generate n random bytes.""" + return self.getrandbits(n * 8).to_bytes(n, 'little') + + ## -------------------- integer methods ------------------- def randrange(self, start, stop=None, step=1): @@ -285,40 +337,6 @@ def randint(self, a, b): return self.randrange(a, b+1) - def _randbelow_with_getrandbits(self, n): - "Return a random int in the range [0,n). Returns 0 if n==0." - - if not n: - return 0 - getrandbits = self.getrandbits - k = n.bit_length() # don't use (n-1) here because n can be 1 - r = getrandbits(k) # 0 <= r < 2**k - while r >= n: - r = getrandbits(k) - return r - - def _randbelow_without_getrandbits(self, n, maxsize=1<= maxsize: - _warn("Underlying random() generator does not supply \n" - "enough bits to choose from a population range this large.\n" - "To remove the range limitation, add a getrandbits() method.") - return _floor(random() * n) - if n == 0: - return 0 - rem = maxsize % n - limit = (maxsize - rem) / maxsize # int(limit * maxsize) % n == 0 - r = random() - while r >= limit: - r = random() - return _floor(r * maxsize) % n - - _randbelow = _randbelow_with_getrandbits ## -------------------- sequence methods ------------------- @@ -479,16 +497,13 @@ def choices(self, population, weights=None, *, cum_weights=None, k=1): return [population[bisect(cum_weights, random() * total, 0, hi)] for i in _repeat(None, k)] - ## -------------------- real-valued distributions ------------------- - ## -------------------- uniform distribution ------------------- + ## -------------------- real-valued distributions ------------------- def uniform(self, a, b): "Get a random number in the range [a, b) or [a, b] depending on rounding." return a + (b - a) * self.random() - ## -------------------- triangular -------------------- - def triangular(self, low=0.0, high=1.0, mode=None): """Triangular distribution. @@ -509,16 +524,12 @@ def triangular(self, low=0.0, high=1.0, mode=None): low, high = high, low return low + (high - low) * _sqrt(u * c) - ## -------------------- normal distribution -------------------- - def normalvariate(self, mu, sigma): """Normal distribution. mu is the mean, and sigma is the standard deviation. """ - # mu = mean, sigma = standard deviation - # Uses Kinderman and Monahan method. Reference: Kinderman, # A.J. 
and Monahan, J.F., "Computer generation of random # variables using the ratio of uniform deviates", ACM Trans @@ -534,7 +545,43 @@ def normalvariate(self, mu, sigma): break return mu + z * sigma - ## -------------------- lognormal distribution -------------------- + def gauss(self, mu, sigma): + """Gaussian distribution. + + mu is the mean, and sigma is the standard deviation. This is + slightly faster than the normalvariate() function. + + Not thread-safe without a lock around calls. + + """ + # When x and y are two variables from [0, 1), uniformly + # distributed, then + # + # cos(2*pi*x)*sqrt(-2*log(1-y)) + # sin(2*pi*x)*sqrt(-2*log(1-y)) + # + # are two *independent* variables with normal distribution + # (mu = 0, sigma = 1). + # (Lambert Meertens) + # (corrected version; bug discovered by Mike Miller, fixed by LM) + + # Multithreading note: When two threads call this function + # simultaneously, it is possible that they will receive the + # same return value. The window is very small though. To + # avoid this, you have to use a lock around all calls. (I + # didn't want to slow this down in the serial case by using a + # lock here.) + + random = self.random + z = self.gauss_next + self.gauss_next = None + if z is None: + x2pi = random() * TWOPI + g2rad = _sqrt(-2.0 * _log(1.0 - random())) + z = _cos(x2pi) * g2rad + self.gauss_next = _sin(x2pi) * g2rad + + return mu + z * sigma def lognormvariate(self, mu, sigma): """Log normal distribution. @@ -546,8 +593,6 @@ def lognormvariate(self, mu, sigma): """ return _exp(self.normalvariate(mu, sigma)) - ## -------------------- exponential distribution -------------------- - def expovariate(self, lambd): """Exponential distribution. @@ -565,8 +610,6 @@ def expovariate(self, lambd): # possibility of taking the log of zero. return -_log(1.0 - self.random()) / lambd - ## -------------------- von Mises distribution -------------------- - def vonmisesvariate(self, mu, kappa): """Circular data distribution. @@ -576,10 +619,6 @@ def vonmisesvariate(self, mu, kappa): to a uniform random angle over the range 0 to 2*pi. """ - # mu: mean angle (in radians between 0 and 2*pi) - # kappa: concentration parameter kappa (>= 0) - # if kappa = 0 generate uniform random angle - # Based upon an algorithm published in: Fisher, N.I., # "Statistical Analysis of Circular Data", Cambridge # University Press, 1993. @@ -613,8 +652,6 @@ def vonmisesvariate(self, mu, kappa): return theta - ## -------------------- gamma distribution -------------------- - def gammavariate(self, alpha, beta): """Gamma distribution. Not the gamma function! @@ -627,7 +664,6 @@ def gammavariate(self, alpha, beta): math.gamma(alpha) * beta ** alpha """ - # alpha > 0, beta > 0, mean is alpha*beta, variance is alpha*beta**2 # Warning: a few older sources define the gamma distribution in terms @@ -681,61 +717,6 @@ def gammavariate(self, alpha, beta): break return x * beta - ## -------------------- Gauss (faster alternative) -------------------- - - def gauss(self, mu, sigma): - """Gaussian distribution. - - mu is the mean, and sigma is the standard deviation. This is - slightly faster than the normalvariate() function. - - Not thread-safe without a lock around calls. - - """ - - # When x and y are two variables from [0, 1), uniformly - # distributed, then - # - # cos(2*pi*x)*sqrt(-2*log(1-y)) - # sin(2*pi*x)*sqrt(-2*log(1-y)) - # - # are two *independent* variables with normal distribution - # (mu = 0, sigma = 1). 
- # (Lambert Meertens) - # (corrected version; bug discovered by Mike Miller, fixed by LM) - - # Multithreading note: When two threads call this function - # simultaneously, it is possible that they will receive the - # same return value. The window is very small though. To - # avoid this, you have to use a lock around all calls. (I - # didn't want to slow this down in the serial case by using a - # lock here.) - - random = self.random - z = self.gauss_next - self.gauss_next = None - if z is None: - x2pi = random() * TWOPI - g2rad = _sqrt(-2.0 * _log(1.0 - random())) - z = _cos(x2pi) * g2rad - self.gauss_next = _sin(x2pi) * g2rad - - return mu + z * sigma - - ## -------------------- beta -------------------- - ## See - ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html - ## for Ivan Frohne's insightful analysis of why the original implementation: - ## - ## def betavariate(self, alpha, beta): - ## # Discrete Event Simulation in C, pp 87-88. - ## - ## y = self.expovariate(alpha) - ## z = self.expovariate(1.0/beta) - ## return z/(y+z) - ## - ## was dead wrong, and how it probably got that way. - def betavariate(self, alpha, beta): """Beta distribution. @@ -743,6 +724,18 @@ def betavariate(self, alpha, beta): Returned values range between 0 and 1. """ + ## See + ## http://mail.python.org/pipermail/python-bugs-list/2001-January/003752.html + ## for Ivan Frohne's insightful analysis of why the original implementation: + ## + ## def betavariate(self, alpha, beta): + ## # Discrete Event Simulation in C, pp 87-88. + ## + ## y = self.expovariate(alpha) + ## z = self.expovariate(1.0/beta) + ## return z/(y+z) + ## + ## was dead wrong, and how it probably got that way. # This version due to Janne Sinkkonen, and matches all the std # texts (e.g., Knuth Vol 2 Ed 3 pg 134 "the beta distribution"). @@ -751,8 +744,6 @@ def betavariate(self, alpha, beta): return y / (y + self.gammavariate(beta, 1.0)) return 0.0 - ## -------------------- Pareto -------------------- - def paretovariate(self, alpha): """Pareto distribution. alpha is the shape parameter.""" # Jain, pg. 495 @@ -760,8 +751,6 @@ def paretovariate(self, alpha): u = 1.0 - self.random() return 1.0 / u ** (1.0 / alpha) - ## -------------------- Weibull -------------------- - def weibullvariate(self, alpha, beta): """Weibull distribution. @@ -774,14 +763,17 @@ def weibullvariate(self, alpha, beta): return alpha * (-_log(u)) ** (1.0 / beta) +## ------------------------------------------------------------------ ## --------------- Operating System Random Source ------------------ + class SystemRandom(Random): """Alternate random number generator using sources provided by the operating system (such as /dev/urandom on Unix or CryptGenRandom on Windows). Not available on all systems (see os.urandom() for details). + """ def random(self): @@ -812,7 +804,41 @@ def _notimplemented(self, *args, **kwds): getstate = setstate = _notimplemented -## -------------------- test program -------------------- +# ---------------------------------------------------------------------- +# Create one instance, seeded from current time, and export its methods +# as module-level functions. The functions share state across all uses +# (both in the user's code and in the Python libraries), but that's fine +# for most programs and is easier for the casual user than making them +# instantiate their own Random() instance. 
+ +_inst = Random() +seed = _inst.seed +random = _inst.random +uniform = _inst.uniform +triangular = _inst.triangular +randint = _inst.randint +choice = _inst.choice +randrange = _inst.randrange +sample = _inst.sample +shuffle = _inst.shuffle +choices = _inst.choices +normalvariate = _inst.normalvariate +lognormvariate = _inst.lognormvariate +expovariate = _inst.expovariate +vonmisesvariate = _inst.vonmisesvariate +gammavariate = _inst.gammavariate +gauss = _inst.gauss +betavariate = _inst.betavariate +paretovariate = _inst.paretovariate +weibullvariate = _inst.weibullvariate +getstate = _inst.getstate +setstate = _inst.setstate +getrandbits = _inst.getrandbits +randbytes = _inst.randbytes + + +## ------------------------------------------------------ +## ----------------- test program ----------------------- def _test_generator(n, func, args): from statistics import stdev, fmean as mean @@ -849,36 +875,9 @@ def _test(N=2000): _test_generator(N, betavariate, (3.0, 3.0)) _test_generator(N, triangular, (0.0, 1.0, 1.0 / 3.0)) -# Create one instance, seeded from current time, and export its methods -# as module-level functions. The functions share state across all uses -# (both in the user's code and in the Python libraries), but that's fine -# for most programs and is easier for the casual user than making them -# instantiate their own Random() instance. -_inst = Random() -seed = _inst.seed -random = _inst.random -uniform = _inst.uniform -triangular = _inst.triangular -randint = _inst.randint -choice = _inst.choice -randrange = _inst.randrange -sample = _inst.sample -shuffle = _inst.shuffle -choices = _inst.choices -normalvariate = _inst.normalvariate -lognormvariate = _inst.lognormvariate -expovariate = _inst.expovariate -vonmisesvariate = _inst.vonmisesvariate -gammavariate = _inst.gammavariate -gauss = _inst.gauss -betavariate = _inst.betavariate -paretovariate = _inst.paretovariate -weibullvariate = _inst.weibullvariate -getstate = _inst.getstate -setstate = _inst.setstate -getrandbits = _inst.getrandbits -randbytes = _inst.randbytes +## ------------------------------------------------------ +## ------------------ fork support --------------------- if hasattr(_os, "fork"): _os.register_at_fork(after_in_child=_inst.seed) From webhook-mailer at python.org Fri Jun 26 07:24:32 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Fri, 26 Jun 2020 11:24:32 -0000 Subject: [Python-checkins] bpo-41084: Adjust message when an f-string expression causes a SyntaxError (GH-21084) Message-ID: https://github.com/python/cpython/commit/2e0a920e9eb540654c0bb2298143b00637dc5961 commit: 2e0a920e9eb540654c0bb2298143b00637dc5961 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-26T12:24:05+01:00 summary: bpo-41084: Adjust message when an f-string expression causes a SyntaxError (GH-21084) Prefix the error message with `fstring: `, when parsing an f-string expression throws a `SyntaxError`. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-23-15-10-19.bpo-41084.pt3y7F.rst M Lib/test/test_fstring.py M Parser/pegen.c diff --git a/Lib/test/test_fstring.py b/Lib/test/test_fstring.py index 7ffe01d2d8c31..0dc7dd8e254c3 100644 --- a/Lib/test/test_fstring.py +++ b/Lib/test/test_fstring.py @@ -524,7 +524,7 @@ def test_format_specifier_expressions(self): # This looks like a nested format spec. ]) - self.assertAllRaise(SyntaxError, "invalid syntax", + self.assertAllRaise(SyntaxError, "f-string: invalid syntax", [# Invalid syntax inside a nested spec. 
"f'{4:{/5}}'", ]) @@ -598,7 +598,7 @@ def test_parens_in_expressions(self): # are added around it. But we shouldn't go from an invalid # expression to a valid one. The added parens are just # supposed to allow whitespace (including newlines). - self.assertAllRaise(SyntaxError, 'invalid syntax', + self.assertAllRaise(SyntaxError, 'f-string: invalid syntax', ["f'{,}'", "f'{,}'", # this is (,), which is an error ]) @@ -716,7 +716,7 @@ def test_lambda(self): # lambda doesn't work without parens, because the colon # makes the parser think it's a format_spec - self.assertAllRaise(SyntaxError, 'invalid syntax', + self.assertAllRaise(SyntaxError, 'f-string: invalid syntax', ["f'{lambda x:x}'", ]) @@ -1194,6 +1194,10 @@ def test_walrus(self): self.assertEqual(f'{(x:=10)}', '10') self.assertEqual(x, 10) + def test_invalid_syntax_error_message(self): + with self.assertRaisesRegex(SyntaxError, "f-string: invalid syntax"): + compile("f'{a $ b}'", "?", "exec") + if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-15-10-19.bpo-41084.pt3y7F.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-15-10-19.bpo-41084.pt3y7F.rst new file mode 100644 index 0000000000000..cd349af770bd0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-15-10-19.bpo-41084.pt3y7F.rst @@ -0,0 +1 @@ +Prefix the error message with 'f-string: ', when parsing an f-string expression which throws a :exc:`SyntaxError`. diff --git a/Parser/pegen.c b/Parser/pegen.c index 594754cee5d53..79fcd2f5999de 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -391,6 +391,21 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, PyObject *tmp = NULL; p->error_indicator = 1; + if (p->start_rule == Py_fstring_input) { + const char *fstring_msg = "f-string: "; + Py_ssize_t len = strlen(fstring_msg) + strlen(errmsg); + + char *new_errmsg = PyMem_RawMalloc(len + 1); // Lengths of both strings plus NULL character + if (!new_errmsg) { + return (void *) PyErr_NoMemory(); + } + + // Copy both strings into new buffer + memcpy(new_errmsg, fstring_msg, strlen(fstring_msg)); + memcpy(new_errmsg + strlen(fstring_msg), errmsg, strlen(errmsg)); + new_errmsg[len] = 0; + errmsg = new_errmsg; + } errstr = PyUnicode_FromFormatV(errmsg, va); if (!errstr) { goto error; @@ -427,11 +442,17 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, Py_DECREF(errstr); Py_DECREF(value); + if (p->start_rule == Py_fstring_input) { + PyMem_RawFree((void *)errmsg); + } return NULL; error: Py_XDECREF(errstr); Py_XDECREF(error_line); + if (p->start_rule == Py_fstring_input) { + PyMem_RawFree((void *)errmsg); + } return NULL; } From webhook-mailer at python.org Sat Jun 27 04:45:00 2020 From: webhook-mailer at python.org (Ned Deily) Date: Sat, 27 Jun 2020 08:45:00 -0000 Subject: [Python-checkins] bpo-41100: clarify NEWS item about macOS 11 support (GH-21174) Message-ID: https://github.com/python/cpython/commit/2f168c6356f92c38ae7751d2faf2b266a9356229 commit: 2f168c6356f92c38ae7751d2faf2b266a9356229 branch: master author: Ned Deily committer: GitHub date: 2020-06-27T04:44:56-04:00 summary: bpo-41100: clarify NEWS item about macOS 11 support (GH-21174) files: M Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst index ded66b567a92d..d6bb616136690 100644 --- a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst +++ 
b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst @@ -1 +1,7 @@ -Support macOS 11 when building. +Fix configure error when building on macOS 11. +Note that the current Python release was released +shortly after the first developer preview of macOS +11 (Big Sur); there are other known issues with +building and running on the developer preview. +Big Sur is expected to be fully supported in a +future bugfix release of Python 3.8.x and with 3.9.0. \ No newline at end of file From webhook-mailer at python.org Sat Jun 27 04:52:54 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sat, 27 Jun 2020 08:52:54 -0000 Subject: [Python-checkins] bpo-41100: clarify NEWS item about macOS 11 support (GH-21174) Message-ID: https://github.com/python/cpython/commit/027bba2eada175358a7f34cad155b25e1b805082 commit: 027bba2eada175358a7f34cad155b25e1b805082 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-27T01:52:50-07:00 summary: bpo-41100: clarify NEWS item about macOS 11 support (GH-21174) (cherry picked from commit 2f168c6356f92c38ae7751d2faf2b266a9356229) Co-authored-by: Ned Deily files: M Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst index ded66b567a92d..d6bb616136690 100644 --- a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst +++ b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst @@ -1 +1,7 @@ -Support macOS 11 when building. +Fix configure error when building on macOS 11. +Note that the current Python release was released +shortly after the first developer preview of macOS +11 (Big Sur); there are other known issues with +building and running on the developer preview. +Big Sur is expected to be fully supported in a +future bugfix release of Python 3.8.x and with 3.9.0. \ No newline at end of file From webhook-mailer at python.org Sat Jun 27 05:22:13 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Sat, 27 Jun 2020 09:22:13 -0000 Subject: [Python-checkins] bpo-41123: Remove Py_UNICODE_str* functions (GH-21164) Message-ID: https://github.com/python/cpython/commit/20a79021753ab26a5989e6d3397160e52973870e commit: 20a79021753ab26a5989e6d3397160e52973870e branch: master author: Inada Naoki committer: GitHub date: 2020-06-27T18:22:09+09:00 summary: bpo-41123: Remove Py_UNICODE_str* functions (GH-21164) They are undocumented and deprecated since Python 3.3. files: A Misc/NEWS.d/next/C API/2020-06-26-13-29-25.bpo-41123.bRa1oy.rst M Doc/whatsnew/3.10.rst M Include/cpython/unicodeobject.h M Objects/unicodeobject.c diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 060d5debf91a9..51e42ec6aba91 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -208,4 +208,18 @@ Removed * ``PyObject_AsCharBuffer()``, ``PyObject_AsReadBuffer()``, ``PyObject_CheckReadBuffer()``, and ``PyObject_AsWriteBuffer()`` are removed. Please migrate to new buffer protocol; :c:func:`PyObject_GetBuffer` and :c:func:`PyBuffer_Release`. - (Contributed by Inada Naoki in :issue:`41103`. + (Contributed by Inada Naoki in :issue:`41103`.) + +* Removed ``Py_UNICODE_str*`` functions manipulating ``Py_UNICODE*`` strings. + (Contributed by Inada Naoki in :issue:`41123`.) 
+ + * ``Py_UNICODE_strlen``: use :c:func:`PyUnicode_GetLength` or + :c:macro:`PyUnicode_GET_LENGTH` + * ``Py_UNICODE_strcat``: use :c:func:`PyUnicode_CopyCharacters` or + :c:func:`PyUnicode_FromFormat` + * ``Py_UNICODE_strcpy``, ``Py_UNICODE_strncpy``: use + :c:func:`PyUnicode_CopyCharacters` or :c:func:`PyUnicode_Substring` + * ``Py_UNICODE_strcmp``: use :c:func:`PyUnicode_Compare` + * ``Py_UNICODE_strncmp``: use :c:func:`PyUnicode_Tailmatch` + * ``Py_UNICODE_strchr``, ``Py_UNICODE_strrchr``: use + :c:func:`PyUnicode_FindChar` diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 7e53ccc9e63f0..bcf99849f9f66 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1163,43 +1163,6 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha( Py_UCS4 ch /* Unicode character */ ); -Py_DEPRECATED(3.3) PyAPI_FUNC(size_t) Py_UNICODE_strlen( - const Py_UNICODE *u - ); - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strcpy( - Py_UNICODE *s1, - const Py_UNICODE *s2); - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strcat( - Py_UNICODE *s1, const Py_UNICODE *s2); - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strncpy( - Py_UNICODE *s1, - const Py_UNICODE *s2, - size_t n); - -Py_DEPRECATED(3.3) PyAPI_FUNC(int) Py_UNICODE_strcmp( - const Py_UNICODE *s1, - const Py_UNICODE *s2 - ); - -Py_DEPRECATED(3.3) PyAPI_FUNC(int) Py_UNICODE_strncmp( - const Py_UNICODE *s1, - const Py_UNICODE *s2, - size_t n - ); - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strchr( - const Py_UNICODE *s, - Py_UNICODE c - ); - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) Py_UNICODE_strrchr( - const Py_UNICODE *s, - Py_UNICODE c - ); - PyAPI_FUNC(PyObject*) _PyUnicode_FormatLong(PyObject *, int, int, int); /* Create a copy of a unicode string ending with a nul character. Return NULL diff --git a/Misc/NEWS.d/next/C API/2020-06-26-13-29-25.bpo-41123.bRa1oy.rst b/Misc/NEWS.d/next/C API/2020-06-26-13-29-25.bpo-41123.bRa1oy.rst new file mode 100644 index 0000000000000..1261a8708d6c9 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-26-13-29-25.bpo-41123.bRa1oy.rst @@ -0,0 +1 @@ +Removed ``Py_UNICODE_str*`` functions manipulating ``Py_UNICODE*`` strings. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 55c886727ba2e..dc0f525c3bfdc 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -15888,94 +15888,6 @@ unicode_iter(PyObject *seq) return (PyObject *)it; } - -size_t -Py_UNICODE_strlen(const Py_UNICODE *u) -{ - return wcslen(u); -} - -Py_UNICODE* -Py_UNICODE_strcpy(Py_UNICODE *s1, const Py_UNICODE *s2) -{ - Py_UNICODE *u = s1; - while ((*u++ = *s2++)); - return s1; -} - -Py_UNICODE* -Py_UNICODE_strncpy(Py_UNICODE *s1, const Py_UNICODE *s2, size_t n) -{ - Py_UNICODE *u = s1; - while ((*u++ = *s2++)) - if (n-- == 0) - break; - return s1; -} - -Py_UNICODE* -Py_UNICODE_strcat(Py_UNICODE *s1, const Py_UNICODE *s2) -{ - Py_UNICODE *u1 = s1; - u1 += wcslen(u1); - while ((*u1++ = *s2++)); - return s1; -} - -int -Py_UNICODE_strcmp(const Py_UNICODE *s1, const Py_UNICODE *s2) -{ - while (*s1 && *s2 && *s1 == *s2) - s1++, s2++; - if (*s1 && *s2) - return (*s1 < *s2) ? -1 : +1; - if (*s1) - return 1; - if (*s2) - return -1; - return 0; -} - -int -Py_UNICODE_strncmp(const Py_UNICODE *s1, const Py_UNICODE *s2, size_t n) -{ - Py_UNICODE u1, u2; - for (; n != 0; n--) { - u1 = *s1; - u2 = *s2; - if (u1 != u2) - return (u1 < u2) ? 
-1 : +1; - if (u1 == '\0') - return 0; - s1++; - s2++; - } - return 0; -} - -Py_UNICODE* -Py_UNICODE_strchr(const Py_UNICODE *s, Py_UNICODE c) -{ - const Py_UNICODE *p; - for (p = s; *p; p++) - if (*p == c) - return (Py_UNICODE*)p; - return NULL; -} - -Py_UNICODE* -Py_UNICODE_strrchr(const Py_UNICODE *s, Py_UNICODE c) -{ - const Py_UNICODE *p; - p = s + wcslen(s); - while (p != s) { - p--; - if (*p == c) - return (Py_UNICODE*)p; - } - return NULL; -} - Py_UNICODE* PyUnicode_AsUnicodeCopy(PyObject *unicode) { From webhook-mailer at python.org Sat Jun 27 08:13:51 2020 From: webhook-mailer at python.org (Christian Heimes) Date: Sat, 27 Jun 2020 12:13:51 -0000 Subject: [Python-checkins] Remove dead code from tracemalloc (GH-21029) Message-ID: https://github.com/python/cpython/commit/9cfcdb7d6e4d09bde63bc7116b2ab0d96724527e commit: 9cfcdb7d6e4d09bde63bc7116b2ab0d96724527e branch: master author: Christian Heimes committer: GitHub date: 2020-06-27T14:13:47+02:00 summary: Remove dead code from tracemalloc (GH-21029) tracemalloc_get_frame() checked filename == NULL two times in a row. Signed-off-by: Christian Heimes files: M Modules/_tracemalloc.c diff --git a/Modules/_tracemalloc.c b/Modules/_tracemalloc.c index 567571657453e..fc91622d3925b 100644 --- a/Modules/_tracemalloc.c +++ b/Modules/_tracemalloc.c @@ -319,10 +319,6 @@ tracemalloc_get_frame(PyFrameObject *pyframe, frame_t *frame) return; } - assert(filename != NULL); - if (filename == NULL) - return; - if (!PyUnicode_Check(filename)) { #ifdef TRACE_DEBUG tracemalloc_error("filename is not a unicode string"); From webhook-mailer at python.org Sat Jun 27 08:44:58 2020 From: webhook-mailer at python.org (Ned Deily) Date: Sat, 27 Jun 2020 12:44:58 -0000 Subject: [Python-checkins] 3.6.11 Message-ID: https://github.com/python/cpython/commit/d56cd4006a1c5e07b0bf69fad9fc8e2fbf6aa855 commit: d56cd4006a1c5e07b0bf69fad9fc8e2fbf6aa855 branch: 3.6 author: Ned Deily committer: Ned Deily date: 2020-06-27T05:14:00-04:00 summary: 3.6.11 files: A Misc/NEWS.d/3.6.11.rst M Include/patchlevel.h M Mac/Resources/app/Info.plist.in M README.rst diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 9710c54b65cf6..9f686b346fba9 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -19,11 +19,11 @@ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 6 #define PY_MICRO_VERSION 11 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL +#define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.6.11rc1+" +#define PY_VERSION "3.6.11" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Mac/Resources/app/Info.plist.in b/Mac/Resources/app/Info.plist.in index 66b5e764c54b0..1d624984a8520 100644 --- a/Mac/Resources/app/Info.plist.in +++ b/Mac/Resources/app/Info.plist.in @@ -20,7 +20,7 @@ CFBundleExecutable Python CFBundleGetInfoString - %version%, (c) 2001-2016 Python Software Foundation. + %version%, (c) 2001-2020 Python Software Foundation. CFBundleHelpBookFolder Documentation @@ -55,7 +55,7 @@ NSAppleScriptEnabled NSHumanReadableCopyright - (c) 2001-2016 Python Software Foundation. + (c) 2001-2020 Python Software Foundation. NSHighResolutionCapable diff --git a/Misc/NEWS.d/3.6.11.rst b/Misc/NEWS.d/3.6.11.rst new file mode 100644 index 0000000000000..f5c5cffeedecf --- /dev/null +++ b/Misc/NEWS.d/3.6.11.rst @@ -0,0 +1,8 @@ +.. bpo: 0 +.. date: 2020-06-27 +.. no changes: True +.. nonce: NCau2w +.. 
release date: 2020-06-27 +.. section: Library + +There were no new changes in version 3.6.11. diff --git a/README.rst b/README.rst index 44e537c31ab76..d6c795f31ff5c 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.6.10 candidate 1+ -========================================== +This is Python version 3.6.11 +============================= .. image:: https://travis-ci.org/python/cpython.svg?branch=3.6 :alt: CPython build status on Travis CI From webhook-mailer at python.org Sat Jun 27 13:47:21 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 27 Jun 2020 17:47:21 -0000 Subject: [Python-checkins] bpo-41132: Use pymalloc allocator in the f-string parser (GH-21173) Message-ID: https://github.com/python/cpython/commit/6dcbc2422de9e2a7ff89a4689572d84001e230b2 commit: 6dcbc2422de9e2a7ff89a4689572d84001e230b2 branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-27T18:47:00+01:00 summary: bpo-41132: Use pymalloc allocator in the f-string parser (GH-21173) files: M Parser/pegen.c M Parser/string_parser.c diff --git a/Parser/pegen.c b/Parser/pegen.c index 79fcd2f5999de..b4216fa2beaa7 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -395,7 +395,7 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, const char *fstring_msg = "f-string: "; Py_ssize_t len = strlen(fstring_msg) + strlen(errmsg); - char *new_errmsg = PyMem_RawMalloc(len + 1); // Lengths of both strings plus NULL character + char *new_errmsg = PyMem_Malloc(len + 1); // Lengths of both strings plus NULL character if (!new_errmsg) { return (void *) PyErr_NoMemory(); } @@ -443,7 +443,7 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, Py_DECREF(errstr); Py_DECREF(value); if (p->start_rule == Py_fstring_input) { - PyMem_RawFree((void *)errmsg); + PyMem_Free((void *)errmsg); } return NULL; @@ -451,7 +451,7 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, Py_XDECREF(errstr); Py_XDECREF(error_line); if (p->start_rule == Py_fstring_input) { - PyMem_RawFree((void *)errmsg); + PyMem_Free((void *)errmsg); } return NULL; } diff --git a/Parser/string_parser.c b/Parser/string_parser.c index f8e2427276cd3..ed7ca7ff834f8 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -592,7 +592,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, len = expr_end - expr_start; /* Allocate 3 extra bytes: open paren, close paren, null byte. */ - str = PyMem_RawMalloc(len + 3); + str = PyMem_Malloc(len + 3); if (str == NULL) { PyErr_NoMemory(); return NULL; @@ -605,7 +605,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, struct tok_state* tok = PyTokenizer_FromString(str, 1); if (tok == NULL) { - PyMem_RawFree(str); + PyMem_Free(str); return NULL; } Py_INCREF(p->tok->filename); @@ -631,7 +631,7 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, result = expr; exit: - PyMem_RawFree(str); + PyMem_Free(str); _PyPegen_Parser_Free(p2); PyTokenizer_Free(tok); return result; @@ -1143,7 +1143,7 @@ ExprList_Append(ExprList *l, expr_ty exp) Py_ssize_t i; /* We're still using the cached data. Switch to alloc-ing. */ - l->p = PyMem_RawMalloc(sizeof(expr_ty) * new_size); + l->p = PyMem_Malloc(sizeof(expr_ty) * new_size); if (!l->p) { return -1; } @@ -1153,9 +1153,9 @@ ExprList_Append(ExprList *l, expr_ty exp) } } else { /* Just realloc. 
*/ - expr_ty *tmp = PyMem_RawRealloc(l->p, sizeof(expr_ty) * new_size); + expr_ty *tmp = PyMem_Realloc(l->p, sizeof(expr_ty) * new_size); if (!tmp) { - PyMem_RawFree(l->p); + PyMem_Free(l->p); l->p = NULL; return -1; } @@ -1183,7 +1183,7 @@ ExprList_Dealloc(ExprList *l) /* Do nothing. */ } else { /* We have dynamically allocated. Free the memory. */ - PyMem_RawFree(l->p); + PyMem_Free(l->p); } l->p = NULL; l->size = -1; From webhook-mailer at python.org Sat Jun 27 14:33:13 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sat, 27 Jun 2020 18:33:13 -0000 Subject: [Python-checkins] bpo-40769: Allow extra surrounding parentheses for invalid annotated assignment rule (GH-20387) Message-ID: https://github.com/python/cpython/commit/c8f29ad986f8274fc5fbf889bdd2a211878856b9 commit: c8f29ad986f8274fc5fbf889bdd2a211878856b9 branch: master author: Batuhan Taskaya committer: GitHub date: 2020-06-27T19:33:08+01:00 summary: bpo-40769: Allow extra surrounding parentheses for invalid annotated assignment rule (GH-20387) files: M Grammar/python.gram M Lib/test/test_syntax.py M Parser/parser.c diff --git a/Grammar/python.gram b/Grammar/python.gram index 652f0db2b175d..1cba11407468d 100644 --- a/Grammar/python.gram +++ b/Grammar/python.gram @@ -646,8 +646,12 @@ invalid_named_expression: RAISE_SYNTAX_ERROR_KNOWN_LOCATION( a, "cannot use assignment expressions with %s", _PyPegen_get_expr_name(a)) } invalid_assignment: - | a=list ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not list) can be annotated") } - | a=tuple ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } + | a=invalid_ann_assign_target ':' expression { + RAISE_SYNTAX_ERROR_KNOWN_LOCATION( + a, + "only single target (not %s) can be annotated", + _PyPegen_get_expr_name(a) + )} | a=star_named_expression ',' star_named_expressions* ':' expression { RAISE_SYNTAX_ERROR_KNOWN_LOCATION(a, "only single target (not tuple) can be annotated") } | a=expression ':' expression { @@ -661,6 +665,10 @@ invalid_assignment: "'%s' is an illegal expression for augmented assignment", _PyPegen_get_expr_name(a) )} +invalid_ann_assign_target[expr_ty]: + | list + | tuple + | '(' a=invalid_ann_assign_target ')' { a } invalid_del_stmt: | 'del' a=star_expressions { RAISE_SYNTAX_ERROR_INVALID_TARGET(DEL_TARGETS, a) } diff --git a/Lib/test/test_syntax.py b/Lib/test/test_syntax.py index 812a7df3228bc..4657fd1c0d8a7 100644 --- a/Lib/test/test_syntax.py +++ b/Lib/test/test_syntax.py @@ -733,6 +733,19 @@ Traceback (most recent call last): SyntaxError: trailing comma not allowed without surrounding parentheses +>>> (): int +Traceback (most recent call last): +SyntaxError: only single target (not tuple) can be annotated +>>> []: int +Traceback (most recent call last): +SyntaxError: only single target (not list) can be annotated +>>> (()): int +Traceback (most recent call last): +SyntaxError: only single target (not tuple) can be annotated +>>> ([]): int +Traceback (most recent call last): +SyntaxError: only single target (not list) can be annotated + Corner-cases that used to fail to raise the correct error: >>> def f(*, x=lambda __debug__:0): pass diff --git a/Parser/parser.c b/Parser/parser.c index a235c251fc9ac..bfd5c47caf07e 100644 --- a/Parser/parser.c +++ b/Parser/parser.c @@ -213,173 +213,174 @@ static KeywordToken *reserved_keywords[] = { #define invalid_kwarg_type 1144 #define invalid_named_expression_type 1145 #define invalid_assignment_type 1146 -#define invalid_del_stmt_type 
1147 -#define invalid_block_type 1148 -#define invalid_comprehension_type 1149 -#define invalid_dict_comprehension_type 1150 -#define invalid_parameters_type 1151 -#define invalid_lambda_parameters_type 1152 -#define invalid_star_etc_type 1153 -#define invalid_lambda_star_etc_type 1154 -#define invalid_double_type_comments_type 1155 -#define invalid_with_item_type 1156 -#define invalid_for_target_type 1157 -#define invalid_group_type 1158 -#define invalid_import_from_targets_type 1159 -#define _loop0_1_type 1160 -#define _loop0_2_type 1161 -#define _loop0_4_type 1162 -#define _gather_3_type 1163 -#define _loop0_6_type 1164 -#define _gather_5_type 1165 -#define _loop0_8_type 1166 -#define _gather_7_type 1167 -#define _loop0_10_type 1168 -#define _gather_9_type 1169 -#define _loop1_11_type 1170 -#define _loop0_13_type 1171 -#define _gather_12_type 1172 -#define _tmp_14_type 1173 -#define _tmp_15_type 1174 -#define _tmp_16_type 1175 -#define _tmp_17_type 1176 -#define _tmp_18_type 1177 -#define _tmp_19_type 1178 -#define _tmp_20_type 1179 -#define _tmp_21_type 1180 -#define _loop1_22_type 1181 -#define _tmp_23_type 1182 -#define _tmp_24_type 1183 -#define _loop0_26_type 1184 -#define _gather_25_type 1185 -#define _loop0_28_type 1186 -#define _gather_27_type 1187 -#define _tmp_29_type 1188 -#define _tmp_30_type 1189 -#define _loop0_31_type 1190 -#define _loop1_32_type 1191 -#define _loop0_34_type 1192 -#define _gather_33_type 1193 -#define _tmp_35_type 1194 -#define _loop0_37_type 1195 -#define _gather_36_type 1196 -#define _tmp_38_type 1197 -#define _loop0_40_type 1198 -#define _gather_39_type 1199 -#define _loop0_42_type 1200 -#define _gather_41_type 1201 -#define _loop0_44_type 1202 -#define _gather_43_type 1203 -#define _loop0_46_type 1204 -#define _gather_45_type 1205 -#define _tmp_47_type 1206 -#define _loop1_48_type 1207 -#define _tmp_49_type 1208 -#define _tmp_50_type 1209 -#define _tmp_51_type 1210 -#define _tmp_52_type 1211 -#define _tmp_53_type 1212 -#define _loop0_54_type 1213 -#define _loop0_55_type 1214 -#define _loop0_56_type 1215 -#define _loop1_57_type 1216 -#define _loop0_58_type 1217 -#define _loop1_59_type 1218 -#define _loop1_60_type 1219 -#define _loop1_61_type 1220 -#define _loop0_62_type 1221 -#define _loop1_63_type 1222 -#define _loop0_64_type 1223 -#define _loop1_65_type 1224 -#define _loop0_66_type 1225 -#define _loop1_67_type 1226 -#define _loop1_68_type 1227 -#define _tmp_69_type 1228 -#define _loop0_71_type 1229 -#define _gather_70_type 1230 -#define _loop1_72_type 1231 -#define _loop0_74_type 1232 -#define _gather_73_type 1233 -#define _loop1_75_type 1234 -#define _loop0_76_type 1235 -#define _loop0_77_type 1236 -#define _loop0_78_type 1237 -#define _loop1_79_type 1238 -#define _loop0_80_type 1239 -#define _loop1_81_type 1240 -#define _loop1_82_type 1241 -#define _loop1_83_type 1242 -#define _loop0_84_type 1243 -#define _loop1_85_type 1244 -#define _loop0_86_type 1245 -#define _loop1_87_type 1246 -#define _loop0_88_type 1247 -#define _loop1_89_type 1248 -#define _loop1_90_type 1249 -#define _loop1_91_type 1250 -#define _loop1_92_type 1251 -#define _tmp_93_type 1252 -#define _loop0_95_type 1253 -#define _gather_94_type 1254 -#define _tmp_96_type 1255 -#define _tmp_97_type 1256 -#define _tmp_98_type 1257 -#define _tmp_99_type 1258 -#define _loop1_100_type 1259 -#define _tmp_101_type 1260 -#define _tmp_102_type 1261 -#define _loop0_104_type 1262 -#define _gather_103_type 1263 -#define _loop1_105_type 1264 -#define _loop0_106_type 1265 -#define _loop0_107_type 1266 
-#define _tmp_108_type 1267 -#define _tmp_109_type 1268 -#define _loop0_111_type 1269 -#define _gather_110_type 1270 -#define _loop0_113_type 1271 -#define _gather_112_type 1272 -#define _loop0_115_type 1273 -#define _gather_114_type 1274 -#define _loop0_117_type 1275 -#define _gather_116_type 1276 -#define _loop0_118_type 1277 -#define _loop0_120_type 1278 -#define _gather_119_type 1279 -#define _tmp_121_type 1280 -#define _loop0_123_type 1281 -#define _gather_122_type 1282 -#define _loop0_125_type 1283 -#define _gather_124_type 1284 -#define _tmp_126_type 1285 -#define _loop0_127_type 1286 -#define _loop0_128_type 1287 -#define _loop0_129_type 1288 -#define _tmp_130_type 1289 -#define _tmp_131_type 1290 -#define _loop0_132_type 1291 -#define _tmp_133_type 1292 -#define _loop0_134_type 1293 -#define _tmp_135_type 1294 -#define _tmp_136_type 1295 -#define _tmp_137_type 1296 -#define _tmp_138_type 1297 -#define _tmp_139_type 1298 -#define _tmp_140_type 1299 -#define _tmp_141_type 1300 -#define _tmp_142_type 1301 -#define _tmp_143_type 1302 -#define _tmp_144_type 1303 -#define _tmp_145_type 1304 -#define _tmp_146_type 1305 -#define _tmp_147_type 1306 -#define _tmp_148_type 1307 -#define _tmp_149_type 1308 -#define _tmp_150_type 1309 -#define _loop1_151_type 1310 -#define _loop1_152_type 1311 -#define _tmp_153_type 1312 -#define _tmp_154_type 1313 +#define invalid_ann_assign_target_type 1147 +#define invalid_del_stmt_type 1148 +#define invalid_block_type 1149 +#define invalid_comprehension_type 1150 +#define invalid_dict_comprehension_type 1151 +#define invalid_parameters_type 1152 +#define invalid_lambda_parameters_type 1153 +#define invalid_star_etc_type 1154 +#define invalid_lambda_star_etc_type 1155 +#define invalid_double_type_comments_type 1156 +#define invalid_with_item_type 1157 +#define invalid_for_target_type 1158 +#define invalid_group_type 1159 +#define invalid_import_from_targets_type 1160 +#define _loop0_1_type 1161 +#define _loop0_2_type 1162 +#define _loop0_4_type 1163 +#define _gather_3_type 1164 +#define _loop0_6_type 1165 +#define _gather_5_type 1166 +#define _loop0_8_type 1167 +#define _gather_7_type 1168 +#define _loop0_10_type 1169 +#define _gather_9_type 1170 +#define _loop1_11_type 1171 +#define _loop0_13_type 1172 +#define _gather_12_type 1173 +#define _tmp_14_type 1174 +#define _tmp_15_type 1175 +#define _tmp_16_type 1176 +#define _tmp_17_type 1177 +#define _tmp_18_type 1178 +#define _tmp_19_type 1179 +#define _tmp_20_type 1180 +#define _tmp_21_type 1181 +#define _loop1_22_type 1182 +#define _tmp_23_type 1183 +#define _tmp_24_type 1184 +#define _loop0_26_type 1185 +#define _gather_25_type 1186 +#define _loop0_28_type 1187 +#define _gather_27_type 1188 +#define _tmp_29_type 1189 +#define _tmp_30_type 1190 +#define _loop0_31_type 1191 +#define _loop1_32_type 1192 +#define _loop0_34_type 1193 +#define _gather_33_type 1194 +#define _tmp_35_type 1195 +#define _loop0_37_type 1196 +#define _gather_36_type 1197 +#define _tmp_38_type 1198 +#define _loop0_40_type 1199 +#define _gather_39_type 1200 +#define _loop0_42_type 1201 +#define _gather_41_type 1202 +#define _loop0_44_type 1203 +#define _gather_43_type 1204 +#define _loop0_46_type 1205 +#define _gather_45_type 1206 +#define _tmp_47_type 1207 +#define _loop1_48_type 1208 +#define _tmp_49_type 1209 +#define _tmp_50_type 1210 +#define _tmp_51_type 1211 +#define _tmp_52_type 1212 +#define _tmp_53_type 1213 +#define _loop0_54_type 1214 +#define _loop0_55_type 1215 +#define _loop0_56_type 1216 +#define _loop1_57_type 1217 
+#define _loop0_58_type 1218 +#define _loop1_59_type 1219 +#define _loop1_60_type 1220 +#define _loop1_61_type 1221 +#define _loop0_62_type 1222 +#define _loop1_63_type 1223 +#define _loop0_64_type 1224 +#define _loop1_65_type 1225 +#define _loop0_66_type 1226 +#define _loop1_67_type 1227 +#define _loop1_68_type 1228 +#define _tmp_69_type 1229 +#define _loop0_71_type 1230 +#define _gather_70_type 1231 +#define _loop1_72_type 1232 +#define _loop0_74_type 1233 +#define _gather_73_type 1234 +#define _loop1_75_type 1235 +#define _loop0_76_type 1236 +#define _loop0_77_type 1237 +#define _loop0_78_type 1238 +#define _loop1_79_type 1239 +#define _loop0_80_type 1240 +#define _loop1_81_type 1241 +#define _loop1_82_type 1242 +#define _loop1_83_type 1243 +#define _loop0_84_type 1244 +#define _loop1_85_type 1245 +#define _loop0_86_type 1246 +#define _loop1_87_type 1247 +#define _loop0_88_type 1248 +#define _loop1_89_type 1249 +#define _loop1_90_type 1250 +#define _loop1_91_type 1251 +#define _loop1_92_type 1252 +#define _tmp_93_type 1253 +#define _loop0_95_type 1254 +#define _gather_94_type 1255 +#define _tmp_96_type 1256 +#define _tmp_97_type 1257 +#define _tmp_98_type 1258 +#define _tmp_99_type 1259 +#define _loop1_100_type 1260 +#define _tmp_101_type 1261 +#define _tmp_102_type 1262 +#define _loop0_104_type 1263 +#define _gather_103_type 1264 +#define _loop1_105_type 1265 +#define _loop0_106_type 1266 +#define _loop0_107_type 1267 +#define _tmp_108_type 1268 +#define _tmp_109_type 1269 +#define _loop0_111_type 1270 +#define _gather_110_type 1271 +#define _loop0_113_type 1272 +#define _gather_112_type 1273 +#define _loop0_115_type 1274 +#define _gather_114_type 1275 +#define _loop0_117_type 1276 +#define _gather_116_type 1277 +#define _loop0_118_type 1278 +#define _loop0_120_type 1279 +#define _gather_119_type 1280 +#define _tmp_121_type 1281 +#define _loop0_123_type 1282 +#define _gather_122_type 1283 +#define _loop0_125_type 1284 +#define _gather_124_type 1285 +#define _tmp_126_type 1286 +#define _loop0_127_type 1287 +#define _loop0_128_type 1288 +#define _loop0_129_type 1289 +#define _tmp_130_type 1290 +#define _tmp_131_type 1291 +#define _loop0_132_type 1292 +#define _tmp_133_type 1293 +#define _loop0_134_type 1294 +#define _tmp_135_type 1295 +#define _tmp_136_type 1296 +#define _tmp_137_type 1297 +#define _tmp_138_type 1298 +#define _tmp_139_type 1299 +#define _tmp_140_type 1300 +#define _tmp_141_type 1301 +#define _tmp_142_type 1302 +#define _tmp_143_type 1303 +#define _tmp_144_type 1304 +#define _tmp_145_type 1305 +#define _tmp_146_type 1306 +#define _tmp_147_type 1307 +#define _tmp_148_type 1308 +#define _tmp_149_type 1309 +#define _tmp_150_type 1310 +#define _loop1_151_type 1311 +#define _loop1_152_type 1312 +#define _tmp_153_type 1313 +#define _tmp_154_type 1314 static mod_ty file_rule(Parser *p); static mod_ty interactive_rule(Parser *p); @@ -528,6 +529,7 @@ static void *incorrect_arguments_rule(Parser *p); static void *invalid_kwarg_rule(Parser *p); static void *invalid_named_expression_rule(Parser *p); static void *invalid_assignment_rule(Parser *p); +static expr_ty invalid_ann_assign_target_rule(Parser *p); static void *invalid_del_stmt_rule(Parser *p); static void *invalid_block_rule(Parser *p); static void *invalid_comprehension_rule(Parser *p); @@ -14660,8 +14662,7 @@ invalid_named_expression_rule(Parser *p) } // invalid_assignment: -// | list ':' expression -// | tuple ':' expression +// | invalid_ann_assign_target ':' expression // | star_named_expression ',' 
star_named_expressions* ':' expression // | expression ':' expression // | ((star_targets '='))* star_expressions '=' @@ -14677,25 +14678,25 @@ invalid_assignment_rule(Parser *p) } void * _res = NULL; int _mark = p->mark; - { // list ':' expression + { // invalid_ann_assign_target ':' expression if (p->error_indicator) { D(p->level--); return NULL; } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list ':' expression")); + D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "invalid_ann_assign_target ':' expression")); Token * _literal; expr_ty a; expr_ty expression_var; if ( - (a = list_rule(p)) // list + (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target && (_literal = _PyPegen_expect_token(p, 11)) // token=':' && (expression_var = expression_rule(p)) // expression ) { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list ':' expression")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not list) can be annotated" ); + D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "invalid_ann_assign_target ':' expression")); + _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not %s) can be annotated" , _PyPegen_get_expr_name ( a ) ); if (_res == NULL && PyErr_Occurred()) { p->error_indicator = 1; D(p->level--); @@ -14705,37 +14706,7 @@ invalid_assignment_rule(Parser *p) } p->mark = _mark; D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list ':' expression")); - } - { // tuple ':' expression - if (p->error_indicator) { - D(p->level--); - return NULL; - } - D(fprintf(stderr, "%*c> invalid_assignment[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple ':' expression")); - Token * _literal; - expr_ty a; - expr_ty expression_var; - if ( - (a = tuple_rule(p)) // tuple - && - (_literal = _PyPegen_expect_token(p, 11)) // token=':' - && - (expression_var = expression_rule(p)) // expression - ) - { - D(fprintf(stderr, "%*c+ invalid_assignment[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple ':' expression")); - _res = RAISE_SYNTAX_ERROR_KNOWN_LOCATION ( a , "only single target (not tuple) can be annotated" ); - if (_res == NULL && PyErr_Occurred()) { - p->error_indicator = 1; - D(p->level--); - return NULL; - } - goto done; - } - p->mark = _mark; - D(fprintf(stderr, "%*c%s invalid_assignment[%d-%d]: %s failed!\n", p->level, ' ', - p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple ':' expression")); + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "invalid_ann_assign_target ':' expression")); } { // star_named_expression ',' star_named_expressions* ':' expression if (p->error_indicator) { @@ -14899,6 +14870,91 @@ invalid_assignment_rule(Parser *p) return _res; } +// invalid_ann_assign_target: list | tuple | '(' invalid_ann_assign_target ')' +static expr_ty +invalid_ann_assign_target_rule(Parser *p) +{ + D(p->level++); + if (p->error_indicator) { + D(p->level--); + return NULL; + } + expr_ty _res = NULL; + int _mark = p->mark; + { // list + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_ann_assign_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "list")); + expr_ty list_var; + if ( + (list_var = list_rule(p)) // list + ) + { + D(fprintf(stderr, "%*c+ invalid_ann_assign_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "list")); + _res = list_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_ann_assign_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "list")); + } + { // tuple + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_ann_assign_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "tuple")); + expr_ty tuple_var; + if ( + (tuple_var = tuple_rule(p)) // tuple + ) + { + D(fprintf(stderr, "%*c+ invalid_ann_assign_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "tuple")); + _res = tuple_var; + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_ann_assign_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" : "-", _mark, p->mark, "tuple")); + } + { // '(' invalid_ann_assign_target ')' + if (p->error_indicator) { + D(p->level--); + return NULL; + } + D(fprintf(stderr, "%*c> invalid_ann_assign_target[%d-%d]: %s\n", p->level, ' ', _mark, p->mark, "'(' invalid_ann_assign_target ')'")); + Token * _literal; + Token * _literal_1; + expr_ty a; + if ( + (_literal = _PyPegen_expect_token(p, 7)) // token='(' + && + (a = invalid_ann_assign_target_rule(p)) // invalid_ann_assign_target + && + (_literal_1 = _PyPegen_expect_token(p, 8)) // token=')' + ) + { + D(fprintf(stderr, "%*c+ invalid_ann_assign_target[%d-%d]: %s succeeded!\n", p->level, ' ', _mark, p->mark, "'(' invalid_ann_assign_target ')'")); + _res = a; + if (_res == NULL && PyErr_Occurred()) { + p->error_indicator = 1; + D(p->level--); + return NULL; + } + goto done; + } + p->mark = _mark; + D(fprintf(stderr, "%*c%s invalid_ann_assign_target[%d-%d]: %s failed!\n", p->level, ' ', + p->error_indicator ? "ERROR!" 
: "-", _mark, p->mark, "'(' invalid_ann_assign_target ')'")); + } + _res = NULL; + done: + D(p->level--); + return _res; +} + // invalid_del_stmt: 'del' star_expressions static void * invalid_del_stmt_rule(Parser *p) From webhook-mailer at python.org Sat Jun 27 14:43:45 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 27 Jun 2020 18:43:45 -0000 Subject: [Python-checkins] [3.8] bpo-41132: Use pymalloc allocator in the f-string parser (GH-21173) (GH-21184) Message-ID: https://github.com/python/cpython/commit/749d3bc04177ff9e2ddfd58d919b84cb4f6cf894 commit: 749d3bc04177ff9e2ddfd58d919b84cb4f6cf894 branch: 3.8 author: Lysandros Nikolaou committer: GitHub date: 2020-06-27T11:43:41-07:00 summary: [3.8] bpo-41132: Use pymalloc allocator in the f-string parser (GH-21173) (GH-21184) Automerge-Triggered-By: @pablogsal files: M Python/ast.c diff --git a/Python/ast.c b/Python/ast.c index 0a999fcca43a8..5efb690c299ca 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -4898,7 +4898,7 @@ fstring_compile_expr(const char *expr_start, const char *expr_end, len = expr_end - expr_start; /* Allocate 3 extra bytes: open paren, close paren, null byte. */ - str = PyMem_RawMalloc(len + 3); + str = PyMem_Malloc(len + 3); if (str == NULL) { PyErr_NoMemory(); return NULL; @@ -4914,7 +4914,7 @@ fstring_compile_expr(const char *expr_start, const char *expr_end, mod_n = PyParser_SimpleParseStringFlagsFilename(str, "", Py_eval_input, 0); if (!mod_n) { - PyMem_RawFree(str); + PyMem_Free(str); return NULL; } /* Reuse str to find the correct column offset. */ @@ -4922,7 +4922,7 @@ fstring_compile_expr(const char *expr_start, const char *expr_end, str[len+1] = '}'; fstring_fix_node_location(n, mod_n, str); mod = PyAST_FromNode(mod_n, &cf, "", c->c_arena); - PyMem_RawFree(str); + PyMem_Free(str); PyNode_Free(mod_n); if (!mod) return NULL; @@ -5438,7 +5438,7 @@ ExprList_Append(ExprList *l, expr_ty exp) Py_ssize_t i; /* We're still using the cached data. Switch to alloc-ing. */ - l->p = PyMem_RawMalloc(sizeof(expr_ty) * new_size); + l->p = PyMem_Malloc(sizeof(expr_ty) * new_size); if (!l->p) return -1; /* Copy the cached data into the new buffer. */ @@ -5446,9 +5446,9 @@ ExprList_Append(ExprList *l, expr_ty exp) l->p[i] = l->data[i]; } else { /* Just realloc. */ - expr_ty *tmp = PyMem_RawRealloc(l->p, sizeof(expr_ty) * new_size); + expr_ty *tmp = PyMem_Realloc(l->p, sizeof(expr_ty) * new_size); if (!tmp) { - PyMem_RawFree(l->p); + PyMem_Free(l->p); l->p = NULL; return -1; } @@ -5476,7 +5476,7 @@ ExprList_Dealloc(ExprList *l) /* Do nothing. */ } else { /* We have dynamically allocated. Free the memory. 
*/ - PyMem_RawFree(l->p); + PyMem_Free(l->p); } l->p = NULL; l->size = -1; From webhook-mailer at python.org Sat Jun 27 15:00:38 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sat, 27 Jun 2020 19:00:38 -0000 Subject: [Python-checkins] Add soft keywords to the documentation (GH-21185) Message-ID: https://github.com/python/cpython/commit/89e82c4a6285c89c054980591c078245a5cc6337 commit: 89e82c4a6285c89c054980591c078245a5cc6337 branch: master author: Pablo Galindo committer: GitHub date: 2020-06-27T20:00:29+01:00 summary: Add soft keywords to the documentation (GH-21185) files: M Doc/library/keyword.rst diff --git a/Doc/library/keyword.rst b/Doc/library/keyword.rst index acec45cdcd586..5cae79f5dc9db 100644 --- a/Doc/library/keyword.rst +++ b/Doc/library/keyword.rst @@ -22,3 +22,19 @@ This module allows a Python program to determine if a string is a Sequence containing all the :ref:`keywords ` defined for the interpreter. If any keywords are defined to only be active when particular :mod:`__future__` statements are in effect, these will be included as well. + + +.. function:: issoftkeyword(s) + + Return ``True`` if *s* is a Python soft :ref:`keyword `. + + .. versionadded:: 3.9 + + +.. data:: softkwlist + + Sequence containing all the soft :ref:`keywords ` defined for the + interpreter. If any soft keywords are defined to only be active when particular + :mod:`__future__` statements are in effect, these will be included as well. + + .. versionadded:: 3.9 From webhook-mailer at python.org Sat Jun 27 19:41:53 2020 From: webhook-mailer at python.org (Lysandros Nikolaou) Date: Sat, 27 Jun 2020 23:41:53 -0000 Subject: [Python-checkins] bpo-41076: Pre-feed the parser with the f-string expression location (GH-21054) Message-ID: https://github.com/python/cpython/commit/1f0f4abb110b9fbade6175842b6a26ab0b8df6dd commit: 1f0f4abb110b9fbade6175842b6a26ab0b8df6dd branch: master author: Lysandros Nikolaou committer: GitHub date: 2020-06-28T00:41:48+01:00 summary: bpo-41076: Pre-feed the parser with the f-string expression location (GH-21054) This commit changes the parsing of f-string expressions with the new parser. The parser gets pre-fed with the location of the expression itself (not the f-string, which was what we were doing before). This allows us to completely skip the shifting of the AST nodes after the parsing is completed. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-22-13-22-30.bpo-41076.eWYw2N.rst M Parser/pegen.c M Parser/string_parser.c M Python/importlib.h M Python/importlib_external.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-22-13-22-30.bpo-41076.eWYw2N.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-22-13-22-30.bpo-41076.eWYw2N.rst new file mode 100644 index 0000000000000..f13560ad9d269 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-22-13-22-30.bpo-41076.eWYw2N.rst @@ -0,0 +1 @@ +Pre-feed the parser with the location of the f-string expression, not the f-string itself, which allows us to skip the shifting of the AST node locations after the parsing is completed. 
\ No newline at end of file diff --git a/Parser/pegen.c b/Parser/pegen.c index b4216fa2beaa7..19762b06d3caf 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -423,6 +423,9 @@ _PyPegen_raise_error_known_location(Parser *p, PyObject *errtype, } } + if (p->start_rule == Py_fstring_input) { + col_offset -= p->starting_col_offset; + } Py_ssize_t col_number = col_offset; if (p->tok->encoding != NULL) { diff --git a/Parser/string_parser.c b/Parser/string_parser.c index ed7ca7ff834f8..9f56ce21d0f20 100644 --- a/Parser/string_parser.c +++ b/Parser/string_parser.c @@ -271,235 +271,6 @@ _PyPegen_parsestr(Parser *p, int *bytesmode, int *rawmode, PyObject **result, // FSTRING STUFF -static void fstring_shift_expr_locations(expr_ty n, int lineno, int col_offset); -static void fstring_shift_argument(expr_ty parent, arg_ty args, int lineno, int col_offset); - - -static inline void shift_expr(expr_ty parent, expr_ty n, int line, int col) { - if (n == NULL) { - return; - } - if (parent->lineno < n->lineno) { - col = 0; - } - fstring_shift_expr_locations(n, line, col); -} - -static inline void shift_arg(expr_ty parent, arg_ty n, int line, int col) { - if (parent->lineno < n->lineno) { - col = 0; - } - fstring_shift_argument(parent, n, line, col); -} - -static void fstring_shift_seq_locations(expr_ty parent, asdl_seq *seq, int lineno, int col_offset) { - for (Py_ssize_t i = 0, l = asdl_seq_LEN(seq); i < l; i++) { - expr_ty expr = asdl_seq_GET(seq, i); - if (expr == NULL){ - continue; - } - shift_expr(parent, expr, lineno, col_offset); - } -} - -static void fstring_shift_slice_locations(expr_ty parent, expr_ty slice, int lineno, int col_offset) { - switch (slice->kind) { - case Slice_kind: - if (slice->v.Slice.lower) { - shift_expr(parent, slice->v.Slice.lower, lineno, col_offset); - } - if (slice->v.Slice.upper) { - shift_expr(parent, slice->v.Slice.upper, lineno, col_offset); - } - if (slice->v.Slice.step) { - shift_expr(parent, slice->v.Slice.step, lineno, col_offset); - } - break; - case Tuple_kind: - fstring_shift_seq_locations(parent, slice->v.Tuple.elts, lineno, col_offset); - break; - default: - break; - } -} - -static void fstring_shift_comprehension(expr_ty parent, comprehension_ty comp, int lineno, int col_offset) { - shift_expr(parent, comp->target, lineno, col_offset); - shift_expr(parent, comp->iter, lineno, col_offset); - fstring_shift_seq_locations(parent, comp->ifs, lineno, col_offset); -} - -static void fstring_shift_argument(expr_ty parent, arg_ty arg, int lineno, int col_offset) { - if (arg->annotation != NULL){ - shift_expr(parent, arg->annotation, lineno, col_offset); - } - arg->col_offset = arg->col_offset + col_offset; - arg->end_col_offset = arg->end_col_offset + col_offset; - arg->lineno = arg->lineno + lineno; - arg->end_lineno = arg->end_lineno + lineno; -} - -static void fstring_shift_arguments(expr_ty parent, arguments_ty args, int lineno, int col_offset) { - for (Py_ssize_t i = 0, l = asdl_seq_LEN(args->posonlyargs); i < l; i++) { - arg_ty arg = asdl_seq_GET(args->posonlyargs, i); - shift_arg(parent, arg, lineno, col_offset); - } - - for (Py_ssize_t i = 0, l = asdl_seq_LEN(args->args); i < l; i++) { - arg_ty arg = asdl_seq_GET(args->args, i); - shift_arg(parent, arg, lineno, col_offset); - } - - if (args->vararg != NULL) { - shift_arg(parent, args->vararg, lineno, col_offset); - } - - for (Py_ssize_t i = 0, l = asdl_seq_LEN(args->kwonlyargs); i < l; i++) { - arg_ty arg = asdl_seq_GET(args->kwonlyargs, i); - shift_arg(parent, arg, lineno, col_offset); - } - - 
fstring_shift_seq_locations(parent, args->kw_defaults, lineno, col_offset); - - if (args->kwarg != NULL) { - shift_arg(parent, args->kwarg, lineno, col_offset); - } - - fstring_shift_seq_locations(parent, args->defaults, lineno, col_offset); -} - -static void fstring_shift_children_locations(expr_ty node, int lineno, int col_offset) { - switch (node->kind) { - case BoolOp_kind: - fstring_shift_seq_locations(node, node->v.BoolOp.values, lineno, col_offset); - break; - case NamedExpr_kind: - shift_expr(node, node->v.NamedExpr.target, lineno, col_offset); - shift_expr(node, node->v.NamedExpr.value, lineno, col_offset); - break; - case BinOp_kind: - shift_expr(node, node->v.BinOp.left, lineno, col_offset); - shift_expr(node, node->v.BinOp.right, lineno, col_offset); - break; - case UnaryOp_kind: - shift_expr(node, node->v.UnaryOp.operand, lineno, col_offset); - break; - case Lambda_kind: - fstring_shift_arguments(node, node->v.Lambda.args, lineno, col_offset); - shift_expr(node, node->v.Lambda.body, lineno, col_offset); - break; - case IfExp_kind: - shift_expr(node, node->v.IfExp.test, lineno, col_offset); - shift_expr(node, node->v.IfExp.body, lineno, col_offset); - shift_expr(node, node->v.IfExp.orelse, lineno, col_offset); - break; - case Dict_kind: - fstring_shift_seq_locations(node, node->v.Dict.keys, lineno, col_offset); - fstring_shift_seq_locations(node, node->v.Dict.values, lineno, col_offset); - break; - case Set_kind: - fstring_shift_seq_locations(node, node->v.Set.elts, lineno, col_offset); - break; - case ListComp_kind: - shift_expr(node, node->v.ListComp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.ListComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(node->v.ListComp.generators, i); - fstring_shift_comprehension(node, comp, lineno, col_offset); - } - break; - case SetComp_kind: - shift_expr(node, node->v.SetComp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.SetComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(node->v.SetComp.generators, i); - fstring_shift_comprehension(node, comp, lineno, col_offset); - } - break; - case DictComp_kind: - shift_expr(node, node->v.DictComp.key, lineno, col_offset); - shift_expr(node, node->v.DictComp.value, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.DictComp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(node->v.DictComp.generators, i); - fstring_shift_comprehension(node, comp, lineno, col_offset); - } - break; - case GeneratorExp_kind: - shift_expr(node, node->v.GeneratorExp.elt, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.GeneratorExp.generators); i < l; i++) { - comprehension_ty comp = asdl_seq_GET(node->v.GeneratorExp.generators, i); - fstring_shift_comprehension(node, comp, lineno, col_offset); - } - break; - case Await_kind: - shift_expr(node, node->v.Await.value, lineno, col_offset); - break; - case Yield_kind: - shift_expr(node, node->v.Yield.value, lineno, col_offset); - break; - case YieldFrom_kind: - shift_expr(node, node->v.YieldFrom.value, lineno, col_offset); - break; - case Compare_kind: - shift_expr(node, node->v.Compare.left, lineno, col_offset); - fstring_shift_seq_locations(node, node->v.Compare.comparators, lineno, col_offset); - break; - case Call_kind: - shift_expr(node, node->v.Call.func, lineno, col_offset); - fstring_shift_seq_locations(node, node->v.Call.args, lineno, col_offset); - for (Py_ssize_t i = 0, l = asdl_seq_LEN(node->v.Call.keywords); i 
< l; i++) { - keyword_ty keyword = asdl_seq_GET(node->v.Call.keywords, i); - shift_expr(node, keyword->value, lineno, col_offset); - } - break; - case Attribute_kind: - shift_expr(node, node->v.Attribute.value, lineno, col_offset); - break; - case Subscript_kind: - shift_expr(node, node->v.Subscript.value, lineno, col_offset); - fstring_shift_slice_locations(node, node->v.Subscript.slice, lineno, col_offset); - shift_expr(node, node->v.Subscript.slice, lineno, col_offset); - break; - case Starred_kind: - shift_expr(node, node->v.Starred.value, lineno, col_offset); - break; - case List_kind: - fstring_shift_seq_locations(node, node->v.List.elts, lineno, col_offset); - break; - case Tuple_kind: - fstring_shift_seq_locations(node, node->v.Tuple.elts, lineno, col_offset); - break; - case JoinedStr_kind: - fstring_shift_seq_locations(node, node->v.JoinedStr.values, lineno, col_offset); - break; - case FormattedValue_kind: - shift_expr(node, node->v.FormattedValue.value, lineno, col_offset); - if (node->v.FormattedValue.format_spec) { - shift_expr(node, node->v.FormattedValue.format_spec, lineno, col_offset); - } - break; - default: - return; - } -} - -/* Shift locations for the given node and all its children by adding `lineno` - and `col_offset` to existing locations. Note that n is the already parsed - expression. */ -static void fstring_shift_expr_locations(expr_ty n, int lineno, int col_offset) -{ - n->col_offset = n->col_offset + col_offset; - - // The following is needed, in order for nodes spanning across multiple lines - // to be shifted correctly. An example of such a node is a Call node, the closing - // parenthesis of which is not on the same line as its name. - if (n->lineno == n->end_lineno) { - n->end_col_offset = n->end_col_offset + col_offset; - } - - fstring_shift_children_locations(n, lineno, col_offset); - n->lineno = n->lineno + lineno; - n->end_lineno = n->end_lineno + lineno; -} - /* Fix locations for the given node and its children. `parent` is the enclosing node. @@ -507,7 +278,7 @@ static void fstring_shift_expr_locations(expr_ty n, int lineno, int col_offset) `expr_str` is the child node's string representation, including braces. */ static void -fstring_fix_expr_location(Token *parent, expr_ty n, char *expr_str) +fstring_find_expr_location(Token *parent, char *expr_str, int *p_lines, int *p_cols) { char *substr = NULL; char *start; @@ -552,7 +323,8 @@ fstring_fix_expr_location(Token *parent, expr_ty n, char *expr_str) } } } - fstring_shift_expr_locations(n, lines, cols); + *p_lines = lines; + *p_cols = cols; } @@ -598,11 +370,26 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, return NULL; } - str[0] = '('; + // The call to fstring_find_expr_location is responsible for finding the column offset + // the generated AST nodes need to be shifted to the right, which is equal to the number + // of the f-string characters before the expression starts. In order to correctly compute + // this offset, strstr gets called in fstring_find_expr_location which only succeeds + // if curly braces appear before and after the f-string expression (exactly like they do + // in the f-string itself), hence the following lines. + str[0] = '{'; memcpy(str+1, expr_start, len); - str[len+1] = ')'; + str[len+1] = '}'; str[len+2] = 0; + int lines, cols; + fstring_find_expr_location(t, str, &lines, &cols); + + // The parentheses are needed in order to allow for leading whitespace withing + // the f-string expression. 
This consequently gets parsed as a group (see the + // group rule in python.gram). + str[0] = '('; + str[len+1] = ')'; + struct tok_state* tok = PyTokenizer_FromString(str, 1); if (tok == NULL) { PyMem_Free(str); @@ -613,21 +400,14 @@ fstring_compile_expr(Parser *p, const char *expr_start, const char *expr_end, Parser *p2 = _PyPegen_Parser_New(tok, Py_fstring_input, p->flags, p->feature_version, NULL, p->arena); - p2->starting_lineno = p->starting_lineno + p->tok->first_lineno - 1; - p2->starting_col_offset = p->tok->first_lineno == p->tok->lineno - ? p->starting_col_offset + t->col_offset : 0; + p2->starting_lineno = t->lineno + lines - 1; + p2->starting_col_offset = p->tok->first_lineno == p->tok->lineno ? t->col_offset + cols : cols; expr = _PyPegen_run_parser(p2); if (expr == NULL) { goto exit; } - - /* Reuse str to find the correct column offset. */ - str[0] = '{'; - str[len+1] = '}'; - fstring_fix_expr_location(t, expr, str); - result = expr; exit: diff --git a/Python/importlib.h b/Python/importlib.h index 59e0272b61dab..1fb877a753419 100644 --- a/Python/importlib.h +++ b/Python/importlib.h @@ -1594,219 +1594,219 @@ const unsigned char _Py_M__importlib_bootstrap[] = { 0,218,1,120,90,5,119,104,101,114,101,90,9,102,114,111, 109,95,110,97,109,101,90,3,101,120,99,114,10,0,0,0, 114,10,0,0,0,114,11,0,0,0,114,215,0,0,0,9, - 4,0,0,115,52,0,0,0,0,10,8,1,10,1,4,1, - 12,2,4,1,4,1,2,255,4,1,8,255,10,2,8,1, - 14,1,10,1,2,255,8,2,10,1,14,1,2,1,14,1, - 14,4,10,1,16,255,2,2,12,1,26,1,114,215,0,0, - 0,99,1,0,0,0,0,0,0,0,0,0,0,0,3,0, - 0,0,6,0,0,0,67,0,0,0,115,146,0,0,0,124, - 0,160,0,100,1,161,1,125,1,124,0,160,0,100,2,161, - 1,125,2,124,1,100,3,117,1,114,82,124,2,100,3,117, - 1,114,78,124,1,124,2,106,1,107,3,114,78,116,2,106, - 3,100,4,124,1,155,2,100,5,124,2,106,1,155,2,100, - 6,157,5,116,4,100,7,100,8,141,3,1,0,124,1,83, - 0,124,2,100,3,117,1,114,96,124,2,106,1,83,0,116, - 2,106,3,100,9,116,4,100,7,100,8,141,3,1,0,124, - 0,100,10,25,0,125,1,100,11,124,0,118,1,114,142,124, - 1,160,5,100,12,161,1,100,13,25,0,125,1,124,1,83, - 0,41,14,122,167,67,97,108,99,117,108,97,116,101,32,119, - 104,97,116,32,95,95,112,97,99,107,97,103,101,95,95,32, - 115,104,111,117,108,100,32,98,101,46,10,10,32,32,32,32, - 95,95,112,97,99,107,97,103,101,95,95,32,105,115,32,110, - 111,116,32,103,117,97,114,97,110,116,101,101,100,32,116,111, - 32,98,101,32,100,101,102,105,110,101,100,32,111,114,32,99, - 111,117,108,100,32,98,101,32,115,101,116,32,116,111,32,78, - 111,110,101,10,32,32,32,32,116,111,32,114,101,112,114,101, - 115,101,110,116,32,116,104,97,116,32,105,116,115,32,112,114, - 111,112,101,114,32,118,97,108,117,101,32,105,115,32,117,110, - 107,110,111,119,110,46,10,10,32,32,32,32,114,146,0,0, - 0,114,106,0,0,0,78,122,32,95,95,112,97,99,107,97, - 103,101,95,95,32,33,61,32,95,95,115,112,101,99,95,95, - 46,112,97,114,101,110,116,32,40,122,4,32,33,61,32,250, - 1,41,233,3,0,0,0,41,1,90,10,115,116,97,99,107, - 108,101,118,101,108,122,89,99,97,110,39,116,32,114,101,115, - 111,108,118,101,32,112,97,99,107,97,103,101,32,102,114,111, - 109,32,95,95,115,112,101,99,95,95,32,111,114,32,95,95, - 112,97,99,107,97,103,101,95,95,44,32,102,97,108,108,105, - 110,103,32,98,97,99,107,32,111,110,32,95,95,110,97,109, - 101,95,95,32,97,110,100,32,95,95,112,97,116,104,95,95, - 114,1,0,0,0,114,142,0,0,0,114,129,0,0,0,114, - 22,0,0,0,41,6,114,35,0,0,0,114,131,0,0,0, - 114,193,0,0,0,114,194,0,0,0,114,195,0,0,0,114, - 130,0,0,0,41,3,218,7,103,108,111,98,97,108,115,114, - 187,0,0,0,114,96,0,0,0,114,10,0,0,0,114,10, - 
0,0,0,114,11,0,0,0,218,17,95,99,97,108,99,95, - 95,95,112,97,99,107,97,103,101,95,95,46,4,0,0,115, - 42,0,0,0,0,7,10,1,10,1,8,1,18,1,6,1, - 2,255,4,1,4,255,6,2,4,254,6,3,4,1,8,1, - 6,2,6,2,4,254,6,3,8,1,8,1,14,1,114,221, - 0,0,0,114,10,0,0,0,99,5,0,0,0,0,0,0, - 0,0,0,0,0,9,0,0,0,5,0,0,0,67,0,0, - 0,115,180,0,0,0,124,4,100,1,107,2,114,18,116,0, - 124,0,131,1,125,5,110,36,124,1,100,2,117,1,114,30, - 124,1,110,2,105,0,125,6,116,1,124,6,131,1,125,7, - 116,0,124,0,124,7,124,4,131,3,125,5,124,3,115,150, - 124,4,100,1,107,2,114,84,116,0,124,0,160,2,100,3, - 161,1,100,1,25,0,131,1,83,0,124,0,115,92,124,5, - 83,0,116,3,124,0,131,1,116,3,124,0,160,2,100,3, - 161,1,100,1,25,0,131,1,24,0,125,8,116,4,106,5, - 124,5,106,6,100,2,116,3,124,5,106,6,131,1,124,8, - 24,0,133,2,25,0,25,0,83,0,110,26,116,7,124,5, - 100,4,131,2,114,172,116,8,124,5,124,3,116,0,131,3, - 83,0,124,5,83,0,100,2,83,0,41,5,97,215,1,0, - 0,73,109,112,111,114,116,32,97,32,109,111,100,117,108,101, - 46,10,10,32,32,32,32,84,104,101,32,39,103,108,111,98, - 97,108,115,39,32,97,114,103,117,109,101,110,116,32,105,115, - 32,117,115,101,100,32,116,111,32,105,110,102,101,114,32,119, - 104,101,114,101,32,116,104,101,32,105,109,112,111,114,116,32, - 105,115,32,111,99,99,117,114,114,105,110,103,32,102,114,111, - 109,10,32,32,32,32,116,111,32,104,97,110,100,108,101,32, - 114,101,108,97,116,105,118,101,32,105,109,112,111,114,116,115, - 46,32,84,104,101,32,39,108,111,99,97,108,115,39,32,97, - 114,103,117,109,101,110,116,32,105,115,32,105,103,110,111,114, - 101,100,46,32,84,104,101,10,32,32,32,32,39,102,114,111, - 109,108,105,115,116,39,32,97,114,103,117,109,101,110,116,32, - 115,112,101,99,105,102,105,101,115,32,119,104,97,116,32,115, - 104,111,117,108,100,32,101,120,105,115,116,32,97,115,32,97, - 116,116,114,105,98,117,116,101,115,32,111,110,32,116,104,101, - 32,109,111,100,117,108,101,10,32,32,32,32,98,101,105,110, - 103,32,105,109,112,111,114,116,101,100,32,40,101,46,103,46, - 32,96,96,102,114,111,109,32,109,111,100,117,108,101,32,105, - 109,112,111,114,116,32,60,102,114,111,109,108,105,115,116,62, - 96,96,41,46,32,32,84,104,101,32,39,108,101,118,101,108, - 39,10,32,32,32,32,97,114,103,117,109,101,110,116,32,114, - 101,112,114,101,115,101,110,116,115,32,116,104,101,32,112,97, - 99,107,97,103,101,32,108,111,99,97,116,105,111,110,32,116, - 111,32,105,109,112,111,114,116,32,102,114,111,109,32,105,110, - 32,97,32,114,101,108,97,116,105,118,101,10,32,32,32,32, - 105,109,112,111,114,116,32,40,101,46,103,46,32,96,96,102, - 114,111,109,32,46,46,112,107,103,32,105,109,112,111,114,116, - 32,109,111,100,96,96,32,119,111,117,108,100,32,104,97,118, - 101,32,97,32,39,108,101,118,101,108,39,32,111,102,32,50, - 41,46,10,10,32,32,32,32,114,22,0,0,0,78,114,129, - 0,0,0,114,142,0,0,0,41,9,114,210,0,0,0,114, - 221,0,0,0,218,9,112,97,114,116,105,116,105,111,110,114, - 186,0,0,0,114,15,0,0,0,114,93,0,0,0,114,1, - 0,0,0,114,4,0,0,0,114,215,0,0,0,41,9,114, - 17,0,0,0,114,220,0,0,0,218,6,108,111,99,97,108, - 115,114,216,0,0,0,114,188,0,0,0,114,97,0,0,0, - 90,8,103,108,111,98,97,108,115,95,114,187,0,0,0,90, - 7,99,117,116,95,111,102,102,114,10,0,0,0,114,10,0, - 0,0,114,11,0,0,0,218,10,95,95,105,109,112,111,114, - 116,95,95,73,4,0,0,115,30,0,0,0,0,11,8,1, - 10,2,16,1,8,1,12,1,4,3,8,1,18,1,4,1, - 4,4,26,3,32,1,10,1,12,2,114,224,0,0,0,99, - 1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, - 3,0,0,0,67,0,0,0,115,38,0,0,0,116,0,160, - 1,124,0,161,1,125,1,124,1,100,0,117,0,114,30,116, - 2,100,1,124,0,23,0,131,1,130,1,116,3,124,1,131, - 1,83,0,41,2,78,122,25,110,111,32,98,117,105,108,116, - 
45,105,110,32,109,111,100,117,108,101,32,110,97,109,101,100, - 32,41,4,114,161,0,0,0,114,168,0,0,0,114,80,0, - 0,0,114,160,0,0,0,41,2,114,17,0,0,0,114,96, - 0,0,0,114,10,0,0,0,114,10,0,0,0,114,11,0, - 0,0,218,18,95,98,117,105,108,116,105,110,95,102,114,111, - 109,95,110,97,109,101,110,4,0,0,115,8,0,0,0,0, - 1,10,1,8,1,12,1,114,225,0,0,0,99,2,0,0, - 0,0,0,0,0,0,0,0,0,10,0,0,0,5,0,0, - 0,67,0,0,0,115,166,0,0,0,124,1,97,0,124,0, - 97,1,116,2,116,1,131,1,125,2,116,1,106,3,160,4, - 161,0,68,0,93,72,92,2,125,3,125,4,116,5,124,4, - 124,2,131,2,114,26,124,3,116,1,106,6,118,0,114,60, - 116,7,125,5,110,18,116,0,160,8,124,3,161,1,114,26, - 116,9,125,5,110,2,113,26,116,10,124,4,124,5,131,2, - 125,6,116,11,124,6,124,4,131,2,1,0,113,26,116,1, - 106,3,116,12,25,0,125,7,100,1,68,0,93,46,125,8, - 124,8,116,1,106,3,118,1,114,138,116,13,124,8,131,1, - 125,9,110,10,116,1,106,3,124,8,25,0,125,9,116,14, - 124,7,124,8,124,9,131,3,1,0,113,114,100,2,83,0, - 41,3,122,250,83,101,116,117,112,32,105,109,112,111,114,116, - 108,105,98,32,98,121,32,105,109,112,111,114,116,105,110,103, - 32,110,101,101,100,101,100,32,98,117,105,108,116,45,105,110, - 32,109,111,100,117,108,101,115,32,97,110,100,32,105,110,106, - 101,99,116,105,110,103,32,116,104,101,109,10,32,32,32,32, - 105,110,116,111,32,116,104,101,32,103,108,111,98,97,108,32, - 110,97,109,101,115,112,97,99,101,46,10,10,32,32,32,32, - 65,115,32,115,121,115,32,105,115,32,110,101,101,100,101,100, - 32,102,111,114,32,115,121,115,46,109,111,100,117,108,101,115, - 32,97,99,99,101,115,115,32,97,110,100,32,95,105,109,112, - 32,105,115,32,110,101,101,100,101,100,32,116,111,32,108,111, - 97,100,32,98,117,105,108,116,45,105,110,10,32,32,32,32, - 109,111,100,117,108,101,115,44,32,116,104,111,115,101,32,116, - 119,111,32,109,111,100,117,108,101,115,32,109,117,115,116,32, - 98,101,32,101,120,112,108,105,99,105,116,108,121,32,112,97, - 115,115,101,100,32,105,110,46,10,10,32,32,32,32,41,3, - 114,23,0,0,0,114,193,0,0,0,114,65,0,0,0,78, - 41,15,114,58,0,0,0,114,15,0,0,0,114,14,0,0, - 0,114,93,0,0,0,218,5,105,116,101,109,115,114,197,0, - 0,0,114,79,0,0,0,114,161,0,0,0,114,89,0,0, - 0,114,175,0,0,0,114,143,0,0,0,114,149,0,0,0, - 114,1,0,0,0,114,225,0,0,0,114,5,0,0,0,41, - 10,218,10,115,121,115,95,109,111,100,117,108,101,218,11,95, - 105,109,112,95,109,111,100,117,108,101,90,11,109,111,100,117, - 108,101,95,116,121,112,101,114,17,0,0,0,114,97,0,0, - 0,114,110,0,0,0,114,96,0,0,0,90,11,115,101,108, - 102,95,109,111,100,117,108,101,90,12,98,117,105,108,116,105, - 110,95,110,97,109,101,90,14,98,117,105,108,116,105,110,95, - 109,111,100,117,108,101,114,10,0,0,0,114,10,0,0,0, - 114,11,0,0,0,218,6,95,115,101,116,117,112,117,4,0, - 0,115,36,0,0,0,0,9,4,1,4,3,8,1,18,1, - 10,1,10,1,6,1,10,1,6,2,2,1,10,1,12,3, - 10,1,8,1,10,1,10,2,10,1,114,229,0,0,0,99, - 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, - 3,0,0,0,67,0,0,0,115,38,0,0,0,116,0,124, - 0,124,1,131,2,1,0,116,1,106,2,160,3,116,4,161, - 1,1,0,116,1,106,2,160,3,116,5,161,1,1,0,100, - 1,83,0,41,2,122,48,73,110,115,116,97,108,108,32,105, - 109,112,111,114,116,101,114,115,32,102,111,114,32,98,117,105, - 108,116,105,110,32,97,110,100,32,102,114,111,122,101,110,32, - 109,111,100,117,108,101,115,78,41,6,114,229,0,0,0,114, - 15,0,0,0,114,192,0,0,0,114,120,0,0,0,114,161, - 0,0,0,114,175,0,0,0,41,2,114,227,0,0,0,114, - 228,0,0,0,114,10,0,0,0,114,10,0,0,0,114,11, - 0,0,0,218,8,95,105,110,115,116,97,108,108,152,4,0, - 0,115,6,0,0,0,0,2,10,2,12,1,114,230,0,0, - 0,99,0,0,0,0,0,0,0,0,0,0,0,0,1,0, - 0,0,4,0,0,0,67,0,0,0,115,32,0,0,0,100, - 
1,100,2,108,0,125,0,124,0,97,1,124,0,160,2,116, - 3,106,4,116,5,25,0,161,1,1,0,100,2,83,0,41, - 3,122,57,73,110,115,116,97,108,108,32,105,109,112,111,114, - 116,101,114,115,32,116,104,97,116,32,114,101,113,117,105,114, - 101,32,101,120,116,101,114,110,97,108,32,102,105,108,101,115, - 121,115,116,101,109,32,97,99,99,101,115,115,114,22,0,0, - 0,78,41,6,218,26,95,102,114,111,122,101,110,95,105,109, - 112,111,114,116,108,105,98,95,101,120,116,101,114,110,97,108, - 114,127,0,0,0,114,230,0,0,0,114,15,0,0,0,114, - 93,0,0,0,114,1,0,0,0,41,1,114,231,0,0,0, + 4,0,0,115,48,0,0,0,0,10,8,1,10,1,4,1, + 12,2,4,1,10,1,8,255,10,2,8,1,14,1,10,1, + 2,255,8,2,10,1,14,1,2,1,14,1,14,4,10,1, + 16,255,2,2,12,1,26,1,114,215,0,0,0,99,1,0, + 0,0,0,0,0,0,0,0,0,0,3,0,0,0,6,0, + 0,0,67,0,0,0,115,146,0,0,0,124,0,160,0,100, + 1,161,1,125,1,124,0,160,0,100,2,161,1,125,2,124, + 1,100,3,117,1,114,82,124,2,100,3,117,1,114,78,124, + 1,124,2,106,1,107,3,114,78,116,2,106,3,100,4,124, + 1,155,2,100,5,124,2,106,1,155,2,100,6,157,5,116, + 4,100,7,100,8,141,3,1,0,124,1,83,0,124,2,100, + 3,117,1,114,96,124,2,106,1,83,0,116,2,106,3,100, + 9,116,4,100,7,100,8,141,3,1,0,124,0,100,10,25, + 0,125,1,100,11,124,0,118,1,114,142,124,1,160,5,100, + 12,161,1,100,13,25,0,125,1,124,1,83,0,41,14,122, + 167,67,97,108,99,117,108,97,116,101,32,119,104,97,116,32, + 95,95,112,97,99,107,97,103,101,95,95,32,115,104,111,117, + 108,100,32,98,101,46,10,10,32,32,32,32,95,95,112,97, + 99,107,97,103,101,95,95,32,105,115,32,110,111,116,32,103, + 117,97,114,97,110,116,101,101,100,32,116,111,32,98,101,32, + 100,101,102,105,110,101,100,32,111,114,32,99,111,117,108,100, + 32,98,101,32,115,101,116,32,116,111,32,78,111,110,101,10, + 32,32,32,32,116,111,32,114,101,112,114,101,115,101,110,116, + 32,116,104,97,116,32,105,116,115,32,112,114,111,112,101,114, + 32,118,97,108,117,101,32,105,115,32,117,110,107,110,111,119, + 110,46,10,10,32,32,32,32,114,146,0,0,0,114,106,0, + 0,0,78,122,32,95,95,112,97,99,107,97,103,101,95,95, + 32,33,61,32,95,95,115,112,101,99,95,95,46,112,97,114, + 101,110,116,32,40,122,4,32,33,61,32,250,1,41,233,3, + 0,0,0,41,1,90,10,115,116,97,99,107,108,101,118,101, + 108,122,89,99,97,110,39,116,32,114,101,115,111,108,118,101, + 32,112,97,99,107,97,103,101,32,102,114,111,109,32,95,95, + 115,112,101,99,95,95,32,111,114,32,95,95,112,97,99,107, + 97,103,101,95,95,44,32,102,97,108,108,105,110,103,32,98, + 97,99,107,32,111,110,32,95,95,110,97,109,101,95,95,32, + 97,110,100,32,95,95,112,97,116,104,95,95,114,1,0,0, + 0,114,142,0,0,0,114,129,0,0,0,114,22,0,0,0, + 41,6,114,35,0,0,0,114,131,0,0,0,114,193,0,0, + 0,114,194,0,0,0,114,195,0,0,0,114,130,0,0,0, + 41,3,218,7,103,108,111,98,97,108,115,114,187,0,0,0, + 114,96,0,0,0,114,10,0,0,0,114,10,0,0,0,114, + 11,0,0,0,218,17,95,99,97,108,99,95,95,95,112,97, + 99,107,97,103,101,95,95,46,4,0,0,115,42,0,0,0, + 0,7,10,1,10,1,8,1,18,1,6,1,2,255,4,1, + 4,255,6,2,4,254,6,3,4,1,8,1,6,2,6,2, + 4,254,6,3,8,1,8,1,14,1,114,221,0,0,0,114, + 10,0,0,0,99,5,0,0,0,0,0,0,0,0,0,0, + 0,9,0,0,0,5,0,0,0,67,0,0,0,115,180,0, + 0,0,124,4,100,1,107,2,114,18,116,0,124,0,131,1, + 125,5,110,36,124,1,100,2,117,1,114,30,124,1,110,2, + 105,0,125,6,116,1,124,6,131,1,125,7,116,0,124,0, + 124,7,124,4,131,3,125,5,124,3,115,150,124,4,100,1, + 107,2,114,84,116,0,124,0,160,2,100,3,161,1,100,1, + 25,0,131,1,83,0,124,0,115,92,124,5,83,0,116,3, + 124,0,131,1,116,3,124,0,160,2,100,3,161,1,100,1, + 25,0,131,1,24,0,125,8,116,4,106,5,124,5,106,6, + 100,2,116,3,124,5,106,6,131,1,124,8,24,0,133,2, + 25,0,25,0,83,0,110,26,116,7,124,5,100,4,131,2, + 
114,172,116,8,124,5,124,3,116,0,131,3,83,0,124,5, + 83,0,100,2,83,0,41,5,97,215,1,0,0,73,109,112, + 111,114,116,32,97,32,109,111,100,117,108,101,46,10,10,32, + 32,32,32,84,104,101,32,39,103,108,111,98,97,108,115,39, + 32,97,114,103,117,109,101,110,116,32,105,115,32,117,115,101, + 100,32,116,111,32,105,110,102,101,114,32,119,104,101,114,101, + 32,116,104,101,32,105,109,112,111,114,116,32,105,115,32,111, + 99,99,117,114,114,105,110,103,32,102,114,111,109,10,32,32, + 32,32,116,111,32,104,97,110,100,108,101,32,114,101,108,97, + 116,105,118,101,32,105,109,112,111,114,116,115,46,32,84,104, + 101,32,39,108,111,99,97,108,115,39,32,97,114,103,117,109, + 101,110,116,32,105,115,32,105,103,110,111,114,101,100,46,32, + 84,104,101,10,32,32,32,32,39,102,114,111,109,108,105,115, + 116,39,32,97,114,103,117,109,101,110,116,32,115,112,101,99, + 105,102,105,101,115,32,119,104,97,116,32,115,104,111,117,108, + 100,32,101,120,105,115,116,32,97,115,32,97,116,116,114,105, + 98,117,116,101,115,32,111,110,32,116,104,101,32,109,111,100, + 117,108,101,10,32,32,32,32,98,101,105,110,103,32,105,109, + 112,111,114,116,101,100,32,40,101,46,103,46,32,96,96,102, + 114,111,109,32,109,111,100,117,108,101,32,105,109,112,111,114, + 116,32,60,102,114,111,109,108,105,115,116,62,96,96,41,46, + 32,32,84,104,101,32,39,108,101,118,101,108,39,10,32,32, + 32,32,97,114,103,117,109,101,110,116,32,114,101,112,114,101, + 115,101,110,116,115,32,116,104,101,32,112,97,99,107,97,103, + 101,32,108,111,99,97,116,105,111,110,32,116,111,32,105,109, + 112,111,114,116,32,102,114,111,109,32,105,110,32,97,32,114, + 101,108,97,116,105,118,101,10,32,32,32,32,105,109,112,111, + 114,116,32,40,101,46,103,46,32,96,96,102,114,111,109,32, + 46,46,112,107,103,32,105,109,112,111,114,116,32,109,111,100, + 96,96,32,119,111,117,108,100,32,104,97,118,101,32,97,32, + 39,108,101,118,101,108,39,32,111,102,32,50,41,46,10,10, + 32,32,32,32,114,22,0,0,0,78,114,129,0,0,0,114, + 142,0,0,0,41,9,114,210,0,0,0,114,221,0,0,0, + 218,9,112,97,114,116,105,116,105,111,110,114,186,0,0,0, + 114,15,0,0,0,114,93,0,0,0,114,1,0,0,0,114, + 4,0,0,0,114,215,0,0,0,41,9,114,17,0,0,0, + 114,220,0,0,0,218,6,108,111,99,97,108,115,114,216,0, + 0,0,114,188,0,0,0,114,97,0,0,0,90,8,103,108, + 111,98,97,108,115,95,114,187,0,0,0,90,7,99,117,116, + 95,111,102,102,114,10,0,0,0,114,10,0,0,0,114,11, + 0,0,0,218,10,95,95,105,109,112,111,114,116,95,95,73, + 4,0,0,115,30,0,0,0,0,11,8,1,10,2,16,1, + 8,1,12,1,4,3,8,1,18,1,4,1,4,4,26,3, + 32,1,10,1,12,2,114,224,0,0,0,99,1,0,0,0, + 0,0,0,0,0,0,0,0,2,0,0,0,3,0,0,0, + 67,0,0,0,115,38,0,0,0,116,0,160,1,124,0,161, + 1,125,1,124,1,100,0,117,0,114,30,116,2,100,1,124, + 0,23,0,131,1,130,1,116,3,124,1,131,1,83,0,41, + 2,78,122,25,110,111,32,98,117,105,108,116,45,105,110,32, + 109,111,100,117,108,101,32,110,97,109,101,100,32,41,4,114, + 161,0,0,0,114,168,0,0,0,114,80,0,0,0,114,160, + 0,0,0,41,2,114,17,0,0,0,114,96,0,0,0,114, + 10,0,0,0,114,10,0,0,0,114,11,0,0,0,218,18, + 95,98,117,105,108,116,105,110,95,102,114,111,109,95,110,97, + 109,101,110,4,0,0,115,8,0,0,0,0,1,10,1,8, + 1,12,1,114,225,0,0,0,99,2,0,0,0,0,0,0, + 0,0,0,0,0,10,0,0,0,5,0,0,0,67,0,0, + 0,115,166,0,0,0,124,1,97,0,124,0,97,1,116,2, + 116,1,131,1,125,2,116,1,106,3,160,4,161,0,68,0, + 93,72,92,2,125,3,125,4,116,5,124,4,124,2,131,2, + 114,26,124,3,116,1,106,6,118,0,114,60,116,7,125,5, + 110,18,116,0,160,8,124,3,161,1,114,26,116,9,125,5, + 110,2,113,26,116,10,124,4,124,5,131,2,125,6,116,11, + 124,6,124,4,131,2,1,0,113,26,116,1,106,3,116,12, + 25,0,125,7,100,1,68,0,93,46,125,8,124,8,116,1, + 
106,3,118,1,114,138,116,13,124,8,131,1,125,9,110,10, + 116,1,106,3,124,8,25,0,125,9,116,14,124,7,124,8, + 124,9,131,3,1,0,113,114,100,2,83,0,41,3,122,250, + 83,101,116,117,112,32,105,109,112,111,114,116,108,105,98,32, + 98,121,32,105,109,112,111,114,116,105,110,103,32,110,101,101, + 100,101,100,32,98,117,105,108,116,45,105,110,32,109,111,100, + 117,108,101,115,32,97,110,100,32,105,110,106,101,99,116,105, + 110,103,32,116,104,101,109,10,32,32,32,32,105,110,116,111, + 32,116,104,101,32,103,108,111,98,97,108,32,110,97,109,101, + 115,112,97,99,101,46,10,10,32,32,32,32,65,115,32,115, + 121,115,32,105,115,32,110,101,101,100,101,100,32,102,111,114, + 32,115,121,115,46,109,111,100,117,108,101,115,32,97,99,99, + 101,115,115,32,97,110,100,32,95,105,109,112,32,105,115,32, + 110,101,101,100,101,100,32,116,111,32,108,111,97,100,32,98, + 117,105,108,116,45,105,110,10,32,32,32,32,109,111,100,117, + 108,101,115,44,32,116,104,111,115,101,32,116,119,111,32,109, + 111,100,117,108,101,115,32,109,117,115,116,32,98,101,32,101, + 120,112,108,105,99,105,116,108,121,32,112,97,115,115,101,100, + 32,105,110,46,10,10,32,32,32,32,41,3,114,23,0,0, + 0,114,193,0,0,0,114,65,0,0,0,78,41,15,114,58, + 0,0,0,114,15,0,0,0,114,14,0,0,0,114,93,0, + 0,0,218,5,105,116,101,109,115,114,197,0,0,0,114,79, + 0,0,0,114,161,0,0,0,114,89,0,0,0,114,175,0, + 0,0,114,143,0,0,0,114,149,0,0,0,114,1,0,0, + 0,114,225,0,0,0,114,5,0,0,0,41,10,218,10,115, + 121,115,95,109,111,100,117,108,101,218,11,95,105,109,112,95, + 109,111,100,117,108,101,90,11,109,111,100,117,108,101,95,116, + 121,112,101,114,17,0,0,0,114,97,0,0,0,114,110,0, + 0,0,114,96,0,0,0,90,11,115,101,108,102,95,109,111, + 100,117,108,101,90,12,98,117,105,108,116,105,110,95,110,97, + 109,101,90,14,98,117,105,108,116,105,110,95,109,111,100,117, + 108,101,114,10,0,0,0,114,10,0,0,0,114,11,0,0, + 0,218,6,95,115,101,116,117,112,117,4,0,0,115,36,0, + 0,0,0,9,4,1,4,3,8,1,18,1,10,1,10,1, + 6,1,10,1,6,2,2,1,10,1,12,3,10,1,8,1, + 10,1,10,2,10,1,114,229,0,0,0,99,2,0,0,0, + 0,0,0,0,0,0,0,0,2,0,0,0,3,0,0,0, + 67,0,0,0,115,38,0,0,0,116,0,124,0,124,1,131, + 2,1,0,116,1,106,2,160,3,116,4,161,1,1,0,116, + 1,106,2,160,3,116,5,161,1,1,0,100,1,83,0,41, + 2,122,48,73,110,115,116,97,108,108,32,105,109,112,111,114, + 116,101,114,115,32,102,111,114,32,98,117,105,108,116,105,110, + 32,97,110,100,32,102,114,111,122,101,110,32,109,111,100,117, + 108,101,115,78,41,6,114,229,0,0,0,114,15,0,0,0, + 114,192,0,0,0,114,120,0,0,0,114,161,0,0,0,114, + 175,0,0,0,41,2,114,227,0,0,0,114,228,0,0,0, 114,10,0,0,0,114,10,0,0,0,114,11,0,0,0,218, - 27,95,105,110,115,116,97,108,108,95,101,120,116,101,114,110, - 97,108,95,105,109,112,111,114,116,101,114,115,160,4,0,0, - 115,6,0,0,0,0,3,8,1,4,1,114,232,0,0,0, - 41,2,78,78,41,1,78,41,2,78,114,22,0,0,0,41, - 4,78,78,114,10,0,0,0,114,22,0,0,0,41,50,114, - 3,0,0,0,114,127,0,0,0,114,12,0,0,0,114,18, - 0,0,0,114,60,0,0,0,114,34,0,0,0,114,44,0, - 0,0,114,19,0,0,0,114,20,0,0,0,114,50,0,0, - 0,114,51,0,0,0,114,54,0,0,0,114,66,0,0,0, - 114,68,0,0,0,114,77,0,0,0,114,87,0,0,0,114, - 91,0,0,0,114,98,0,0,0,114,112,0,0,0,114,113, - 0,0,0,114,92,0,0,0,114,143,0,0,0,114,149,0, - 0,0,114,153,0,0,0,114,108,0,0,0,114,94,0,0, - 0,114,159,0,0,0,114,160,0,0,0,114,95,0,0,0, - 114,161,0,0,0,114,175,0,0,0,114,180,0,0,0,114, - 189,0,0,0,114,191,0,0,0,114,196,0,0,0,114,202, - 0,0,0,90,15,95,69,82,82,95,77,83,71,95,80,82, - 69,70,73,88,114,204,0,0,0,114,207,0,0,0,218,6, - 111,98,106,101,99,116,114,208,0,0,0,114,209,0,0,0, - 114,210,0,0,0,114,215,0,0,0,114,221,0,0,0,114, - 
224,0,0,0,114,225,0,0,0,114,229,0,0,0,114,230, - 0,0,0,114,232,0,0,0,114,10,0,0,0,114,10,0, - 0,0,114,10,0,0,0,114,11,0,0,0,218,8,60,109, - 111,100,117,108,101,62,1,0,0,0,115,94,0,0,0,4, - 24,4,2,8,8,8,8,4,2,4,3,16,4,14,77,14, - 21,14,16,8,37,8,17,8,11,14,8,8,11,8,12,8, - 16,8,36,14,101,16,26,10,45,14,72,8,17,8,17,8, - 30,8,37,8,42,8,15,14,75,14,79,14,13,8,9,8, - 9,10,47,8,16,4,1,8,2,8,32,6,3,8,16,10, - 15,14,37,8,27,10,37,8,7,8,35,8,8, + 8,95,105,110,115,116,97,108,108,152,4,0,0,115,6,0, + 0,0,0,2,10,2,12,1,114,230,0,0,0,99,0,0, + 0,0,0,0,0,0,0,0,0,0,1,0,0,0,4,0, + 0,0,67,0,0,0,115,32,0,0,0,100,1,100,2,108, + 0,125,0,124,0,97,1,124,0,160,2,116,3,106,4,116, + 5,25,0,161,1,1,0,100,2,83,0,41,3,122,57,73, + 110,115,116,97,108,108,32,105,109,112,111,114,116,101,114,115, + 32,116,104,97,116,32,114,101,113,117,105,114,101,32,101,120, + 116,101,114,110,97,108,32,102,105,108,101,115,121,115,116,101, + 109,32,97,99,99,101,115,115,114,22,0,0,0,78,41,6, + 218,26,95,102,114,111,122,101,110,95,105,109,112,111,114,116, + 108,105,98,95,101,120,116,101,114,110,97,108,114,127,0,0, + 0,114,230,0,0,0,114,15,0,0,0,114,93,0,0,0, + 114,1,0,0,0,41,1,114,231,0,0,0,114,10,0,0, + 0,114,10,0,0,0,114,11,0,0,0,218,27,95,105,110, + 115,116,97,108,108,95,101,120,116,101,114,110,97,108,95,105, + 109,112,111,114,116,101,114,115,160,4,0,0,115,6,0,0, + 0,0,3,8,1,4,1,114,232,0,0,0,41,2,78,78, + 41,1,78,41,2,78,114,22,0,0,0,41,4,78,78,114, + 10,0,0,0,114,22,0,0,0,41,50,114,3,0,0,0, + 114,127,0,0,0,114,12,0,0,0,114,18,0,0,0,114, + 60,0,0,0,114,34,0,0,0,114,44,0,0,0,114,19, + 0,0,0,114,20,0,0,0,114,50,0,0,0,114,51,0, + 0,0,114,54,0,0,0,114,66,0,0,0,114,68,0,0, + 0,114,77,0,0,0,114,87,0,0,0,114,91,0,0,0, + 114,98,0,0,0,114,112,0,0,0,114,113,0,0,0,114, + 92,0,0,0,114,143,0,0,0,114,149,0,0,0,114,153, + 0,0,0,114,108,0,0,0,114,94,0,0,0,114,159,0, + 0,0,114,160,0,0,0,114,95,0,0,0,114,161,0,0, + 0,114,175,0,0,0,114,180,0,0,0,114,189,0,0,0, + 114,191,0,0,0,114,196,0,0,0,114,202,0,0,0,90, + 15,95,69,82,82,95,77,83,71,95,80,82,69,70,73,88, + 114,204,0,0,0,114,207,0,0,0,218,6,111,98,106,101, + 99,116,114,208,0,0,0,114,209,0,0,0,114,210,0,0, + 0,114,215,0,0,0,114,221,0,0,0,114,224,0,0,0, + 114,225,0,0,0,114,229,0,0,0,114,230,0,0,0,114, + 232,0,0,0,114,10,0,0,0,114,10,0,0,0,114,10, + 0,0,0,114,11,0,0,0,218,8,60,109,111,100,117,108, + 101,62,1,0,0,0,115,94,0,0,0,4,24,4,2,8, + 8,8,8,4,2,4,3,16,4,14,77,14,21,14,16,8, + 37,8,17,8,11,14,8,8,11,8,12,8,16,8,36,14, + 101,16,26,10,45,14,72,8,17,8,17,8,30,8,37,8, + 42,8,15,14,75,14,79,14,13,8,9,8,9,10,47,8, + 16,4,1,8,2,8,32,6,3,8,16,10,15,14,37,8, + 27,10,37,8,7,8,35,8,8, }; diff --git a/Python/importlib_external.h b/Python/importlib_external.h index 4d08e01b138c3..a5a7c383d785e 100644 --- a/Python/importlib_external.h +++ b/Python/importlib_external.h @@ -481,2210 +481,2210 @@ const unsigned char _Py_M__importlib_bootstrap_external[] = { 108,101,118,101,108,90,13,98,97,115,101,95,102,105,108,101, 110,97,109,101,114,5,0,0,0,114,5,0,0,0,114,8, 0,0,0,218,17,115,111,117,114,99,101,95,102,114,111,109, - 95,99,97,99,104,101,116,1,0,0,115,68,0,0,0,0, + 95,99,97,99,104,101,116,1,0,0,115,60,0,0,0,0, 9,12,1,8,1,10,1,12,1,4,1,10,1,12,1,14, - 1,16,1,4,1,4,1,12,1,8,1,2,1,2,255,4, - 1,2,255,8,2,10,1,8,1,16,1,10,1,16,1,10, - 1,4,1,2,255,8,2,16,1,10,1,4,1,2,255,10, - 2,14,1,114,102,0,0,0,99,1,0,0,0,0,0,0, - 0,0,0,0,0,5,0,0,0,9,0,0,0,67,0,0, - 0,115,124,0,0,0,116,0,124,0,131,1,100,1,107,2, - 114,16,100,2,83,0,124,0,160,1,100,3,161,1,92,3, - 125,1,125,2,125,3,124,1,114,56,124,3,160,2,161,0, - 
100,4,100,5,133,2,25,0,100,6,107,3,114,60,124,0, - 83,0,122,12,116,3,124,0,131,1,125,4,87,0,110,34, - 4,0,116,4,116,5,102,2,121,106,1,0,1,0,1,0, - 124,0,100,2,100,5,133,2,25,0,125,4,89,0,110,2, - 48,0,116,6,124,4,131,1,114,120,124,4,83,0,124,0, - 83,0,41,7,122,188,67,111,110,118,101,114,116,32,97,32, - 98,121,116,101,99,111,100,101,32,102,105,108,101,32,112,97, - 116,104,32,116,111,32,97,32,115,111,117,114,99,101,32,112, - 97,116,104,32,40,105,102,32,112,111,115,115,105,98,108,101, - 41,46,10,10,32,32,32,32,84,104,105,115,32,102,117,110, - 99,116,105,111,110,32,101,120,105,115,116,115,32,112,117,114, - 101,108,121,32,102,111,114,32,98,97,99,107,119,97,114,100, - 115,45,99,111,109,112,97,116,105,98,105,108,105,116,121,32, - 102,111,114,10,32,32,32,32,80,121,73,109,112,111,114,116, - 95,69,120,101,99,67,111,100,101,77,111,100,117,108,101,87, - 105,116,104,70,105,108,101,110,97,109,101,115,40,41,32,105, - 110,32,116,104,101,32,67,32,65,80,73,46,10,10,32,32, - 32,32,114,73,0,0,0,78,114,71,0,0,0,233,253,255, - 255,255,233,255,255,255,255,90,2,112,121,41,7,114,23,0, - 0,0,114,41,0,0,0,218,5,108,111,119,101,114,114,102, - 0,0,0,114,82,0,0,0,114,86,0,0,0,114,54,0, - 0,0,41,5,218,13,98,121,116,101,99,111,100,101,95,112, - 97,116,104,114,95,0,0,0,114,45,0,0,0,90,9,101, - 120,116,101,110,115,105,111,110,218,11,115,111,117,114,99,101, - 95,112,97,116,104,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,218,15,95,103,101,116,95,115,111,117,114,99, - 101,102,105,108,101,156,1,0,0,115,20,0,0,0,0,7, - 12,1,4,1,16,1,24,1,4,1,2,1,12,1,16,1, - 18,1,114,108,0,0,0,99,1,0,0,0,0,0,0,0, - 0,0,0,0,1,0,0,0,8,0,0,0,67,0,0,0, - 115,72,0,0,0,124,0,160,0,116,1,116,2,131,1,161, - 1,114,46,122,10,116,3,124,0,131,1,87,0,83,0,4, - 0,116,4,121,42,1,0,1,0,1,0,89,0,113,68,48, - 0,110,22,124,0,160,0,116,1,116,5,131,1,161,1,114, - 64,124,0,83,0,100,0,83,0,100,0,83,0,169,1,78, - 41,6,218,8,101,110,100,115,119,105,116,104,218,5,116,117, - 112,108,101,114,101,0,0,0,114,97,0,0,0,114,82,0, - 0,0,114,88,0,0,0,41,1,114,96,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,11,95, - 103,101,116,95,99,97,99,104,101,100,175,1,0,0,115,16, - 0,0,0,0,1,14,1,2,1,10,1,12,1,8,1,14, - 1,4,2,114,112,0,0,0,99,1,0,0,0,0,0,0, - 0,0,0,0,0,2,0,0,0,8,0,0,0,67,0,0, - 0,115,50,0,0,0,122,14,116,0,124,0,131,1,106,1, - 125,1,87,0,110,22,4,0,116,2,121,36,1,0,1,0, - 1,0,100,1,125,1,89,0,110,2,48,0,124,1,100,2, - 79,0,125,1,124,1,83,0,41,3,122,51,67,97,108,99, - 117,108,97,116,101,32,116,104,101,32,109,111,100,101,32,112, - 101,114,109,105,115,115,105,111,110,115,32,102,111,114,32,97, - 32,98,121,116,101,99,111,100,101,32,102,105,108,101,46,114, - 60,0,0,0,233,128,0,0,0,41,3,114,49,0,0,0, - 114,51,0,0,0,114,50,0,0,0,41,2,114,44,0,0, - 0,114,52,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,10,95,99,97,108,99,95,109,111,100, - 101,187,1,0,0,115,12,0,0,0,0,2,2,1,14,1, - 12,1,10,3,8,1,114,114,0,0,0,99,1,0,0,0, - 0,0,0,0,0,0,0,0,3,0,0,0,8,0,0,0, - 3,0,0,0,115,66,0,0,0,100,6,135,0,102,1,100, - 2,100,3,132,9,125,1,122,10,116,0,106,1,125,2,87, - 0,110,26,4,0,116,2,121,50,1,0,1,0,1,0,100, - 4,100,5,132,0,125,2,89,0,110,2,48,0,124,2,124, - 1,136,0,131,2,1,0,124,1,83,0,41,7,122,252,68, - 101,99,111,114,97,116,111,114,32,116,111,32,118,101,114,105, - 102,121,32,116,104,97,116,32,116,104,101,32,109,111,100,117, - 108,101,32,98,101,105,110,103,32,114,101,113,117,101,115,116, - 101,100,32,109,97,116,99,104,101,115,32,116,104,101,32,111, - 110,101,32,116,104,101,10,32,32,32,32,108,111,97,100,101, - 114,32,99,97,110,32,104,97,110,100,108,101,46,10,10,32, - 
32,32,32,84,104,101,32,102,105,114,115,116,32,97,114,103, - 117,109,101,110,116,32,40,115,101,108,102,41,32,109,117,115, - 116,32,100,101,102,105,110,101,32,95,110,97,109,101,32,119, - 104,105,99,104,32,116,104,101,32,115,101,99,111,110,100,32, - 97,114,103,117,109,101,110,116,32,105,115,10,32,32,32,32, - 99,111,109,112,97,114,101,100,32,97,103,97,105,110,115,116, - 46,32,73,102,32,116,104,101,32,99,111,109,112,97,114,105, - 115,111,110,32,102,97,105,108,115,32,116,104,101,110,32,73, - 109,112,111,114,116,69,114,114,111,114,32,105,115,32,114,97, - 105,115,101,100,46,10,10,32,32,32,32,78,99,2,0,0, - 0,0,0,0,0,0,0,0,0,4,0,0,0,4,0,0, - 0,31,0,0,0,115,72,0,0,0,124,1,100,0,117,0, - 114,16,124,0,106,0,125,1,110,32,124,0,106,0,124,1, - 107,3,114,48,116,1,100,1,124,0,106,0,124,1,102,2, - 22,0,124,1,100,2,141,2,130,1,136,0,124,0,124,1, - 103,2,124,2,162,1,82,0,105,0,124,3,164,1,142,1, - 83,0,41,3,78,122,30,108,111,97,100,101,114,32,102,111, - 114,32,37,115,32,99,97,110,110,111,116,32,104,97,110,100, - 108,101,32,37,115,169,1,218,4,110,97,109,101,41,2,114, - 116,0,0,0,218,11,73,109,112,111,114,116,69,114,114,111, - 114,41,4,218,4,115,101,108,102,114,116,0,0,0,218,4, - 97,114,103,115,218,6,107,119,97,114,103,115,169,1,218,6, - 109,101,116,104,111,100,114,5,0,0,0,114,8,0,0,0, - 218,19,95,99,104,101,99,107,95,110,97,109,101,95,119,114, - 97,112,112,101,114,207,1,0,0,115,18,0,0,0,0,1, - 8,1,8,1,10,1,4,1,8,255,2,1,2,255,6,2, - 122,40,95,99,104,101,99,107,95,110,97,109,101,46,60,108, - 111,99,97,108,115,62,46,95,99,104,101,99,107,95,110,97, - 109,101,95,119,114,97,112,112,101,114,99,2,0,0,0,0, - 0,0,0,0,0,0,0,3,0,0,0,7,0,0,0,83, - 0,0,0,115,56,0,0,0,100,1,68,0,93,32,125,2, - 116,0,124,1,124,2,131,2,114,4,116,1,124,0,124,2, - 116,2,124,1,124,2,131,2,131,3,1,0,113,4,124,0, - 106,3,160,4,124,1,106,3,161,1,1,0,100,0,83,0, - 41,2,78,41,4,218,10,95,95,109,111,100,117,108,101,95, - 95,218,8,95,95,110,97,109,101,95,95,218,12,95,95,113, - 117,97,108,110,97,109,101,95,95,218,7,95,95,100,111,99, - 95,95,41,5,218,7,104,97,115,97,116,116,114,218,7,115, - 101,116,97,116,116,114,218,7,103,101,116,97,116,116,114,218, - 8,95,95,100,105,99,116,95,95,218,6,117,112,100,97,116, - 101,41,3,90,3,110,101,119,90,3,111,108,100,114,67,0, - 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, - 0,218,5,95,119,114,97,112,218,1,0,0,115,8,0,0, - 0,0,1,8,1,10,1,20,1,122,26,95,99,104,101,99, - 107,95,110,97,109,101,46,60,108,111,99,97,108,115,62,46, - 95,119,114,97,112,41,1,78,41,3,218,10,95,98,111,111, - 116,115,116,114,97,112,114,133,0,0,0,218,9,78,97,109, - 101,69,114,114,111,114,41,3,114,122,0,0,0,114,123,0, - 0,0,114,133,0,0,0,114,5,0,0,0,114,121,0,0, - 0,114,8,0,0,0,218,11,95,99,104,101,99,107,95,110, - 97,109,101,199,1,0,0,115,14,0,0,0,0,8,14,7, - 2,1,10,1,12,2,14,5,10,1,114,136,0,0,0,99, - 2,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0, - 6,0,0,0,67,0,0,0,115,60,0,0,0,124,0,160, - 0,124,1,161,1,92,2,125,2,125,3,124,2,100,1,117, - 0,114,56,116,1,124,3,131,1,114,56,100,2,125,4,116, - 2,160,3,124,4,160,4,124,3,100,3,25,0,161,1,116, - 5,161,2,1,0,124,2,83,0,41,4,122,155,84,114,121, - 32,116,111,32,102,105,110,100,32,97,32,108,111,97,100,101, - 114,32,102,111,114,32,116,104,101,32,115,112,101,99,105,102, - 105,101,100,32,109,111,100,117,108,101,32,98,121,32,100,101, - 108,101,103,97,116,105,110,103,32,116,111,10,32,32,32,32, - 115,101,108,102,46,102,105,110,100,95,108,111,97,100,101,114, - 40,41,46,10,10,32,32,32,32,84,104,105,115,32,109,101, - 116,104,111,100,32,105,115,32,100,101,112,114,101,99,97,116, - 
101,100,32,105,110,32,102,97,118,111,114,32,111,102,32,102, - 105,110,100,101,114,46,102,105,110,100,95,115,112,101,99,40, - 41,46,10,10,32,32,32,32,78,122,44,78,111,116,32,105, - 109,112,111,114,116,105,110,103,32,100,105,114,101,99,116,111, - 114,121,32,123,125,58,32,109,105,115,115,105,110,103,32,95, - 95,105,110,105,116,95,95,114,73,0,0,0,41,6,218,11, - 102,105,110,100,95,108,111,97,100,101,114,114,23,0,0,0, - 114,75,0,0,0,114,76,0,0,0,114,62,0,0,0,218, - 13,73,109,112,111,114,116,87,97,114,110,105,110,103,41,5, - 114,118,0,0,0,218,8,102,117,108,108,110,97,109,101,218, - 6,108,111,97,100,101,114,218,8,112,111,114,116,105,111,110, - 115,218,3,109,115,103,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,17,95,102,105,110,100,95,109,111,100, - 117,108,101,95,115,104,105,109,227,1,0,0,115,10,0,0, - 0,0,10,14,1,16,1,4,1,22,1,114,143,0,0,0, - 99,3,0,0,0,0,0,0,0,0,0,0,0,6,0,0, - 0,4,0,0,0,67,0,0,0,115,166,0,0,0,124,0, - 100,1,100,2,133,2,25,0,125,3,124,3,116,0,107,3, - 114,64,100,3,124,1,155,2,100,4,124,3,155,2,157,4, - 125,4,116,1,160,2,100,5,124,4,161,2,1,0,116,3, - 124,4,102,1,105,0,124,2,164,1,142,1,130,1,116,4, - 124,0,131,1,100,6,107,0,114,106,100,7,124,1,155,2, - 157,2,125,4,116,1,160,2,100,5,124,4,161,2,1,0, - 116,5,124,4,131,1,130,1,116,6,124,0,100,2,100,8, - 133,2,25,0,131,1,125,5,124,5,100,9,64,0,114,162, - 100,10,124,5,155,2,100,11,124,1,155,2,157,4,125,4, - 116,3,124,4,102,1,105,0,124,2,164,1,142,1,130,1, - 124,5,83,0,41,12,97,84,2,0,0,80,101,114,102,111, - 114,109,32,98,97,115,105,99,32,118,97,108,105,100,105,116, - 121,32,99,104,101,99,107,105,110,103,32,111,102,32,97,32, - 112,121,99,32,104,101,97,100,101,114,32,97,110,100,32,114, - 101,116,117,114,110,32,116,104,101,32,102,108,97,103,115,32, - 102,105,101,108,100,44,10,32,32,32,32,119,104,105,99,104, - 32,100,101,116,101,114,109,105,110,101,115,32,104,111,119,32, - 116,104,101,32,112,121,99,32,115,104,111,117,108,100,32,98, - 101,32,102,117,114,116,104,101,114,32,118,97,108,105,100,97, - 116,101,100,32,97,103,97,105,110,115,116,32,116,104,101,32, - 115,111,117,114,99,101,46,10,10,32,32,32,32,42,100,97, - 116,97,42,32,105,115,32,116,104,101,32,99,111,110,116,101, - 110,116,115,32,111,102,32,116,104,101,32,112,121,99,32,102, - 105,108,101,46,32,40,79,110,108,121,32,116,104,101,32,102, - 105,114,115,116,32,49,54,32,98,121,116,101,115,32,97,114, - 101,10,32,32,32,32,114,101,113,117,105,114,101,100,44,32, - 116,104,111,117,103,104,46,41,10,10,32,32,32,32,42,110, - 97,109,101,42,32,105,115,32,116,104,101,32,110,97,109,101, - 32,111,102,32,116,104,101,32,109,111,100,117,108,101,32,98, - 101,105,110,103,32,105,109,112,111,114,116,101,100,46,32,73, - 116,32,105,115,32,117,115,101,100,32,102,111,114,32,108,111, - 103,103,105,110,103,46,10,10,32,32,32,32,42,101,120,99, - 95,100,101,116,97,105,108,115,42,32,105,115,32,97,32,100, - 105,99,116,105,111,110,97,114,121,32,112,97,115,115,101,100, - 32,116,111,32,73,109,112,111,114,116,69,114,114,111,114,32, - 105,102,32,105,116,32,114,97,105,115,101,100,32,102,111,114, - 10,32,32,32,32,105,109,112,114,111,118,101,100,32,100,101, - 98,117,103,103,105,110,103,46,10,10,32,32,32,32,73,109, - 112,111,114,116,69,114,114,111,114,32,105,115,32,114,97,105, - 115,101,100,32,119,104,101,110,32,116,104,101,32,109,97,103, - 105,99,32,110,117,109,98,101,114,32,105,115,32,105,110,99, - 111,114,114,101,99,116,32,111,114,32,119,104,101,110,32,116, - 104,101,32,102,108,97,103,115,10,32,32,32,32,102,105,101, - 108,100,32,105,115,32,105,110,118,97,108,105,100,46,32,69, - 
79,70,69,114,114,111,114,32,105,115,32,114,97,105,115,101, - 100,32,119,104,101,110,32,116,104,101,32,100,97,116,97,32, - 105,115,32,102,111,117,110,100,32,116,111,32,98,101,32,116, - 114,117,110,99,97,116,101,100,46,10,10,32,32,32,32,78, - 114,16,0,0,0,122,20,98,97,100,32,109,97,103,105,99, - 32,110,117,109,98,101,114,32,105,110,32,122,2,58,32,250, - 2,123,125,233,16,0,0,0,122,40,114,101,97,99,104,101, - 100,32,69,79,70,32,119,104,105,108,101,32,114,101,97,100, - 105,110,103,32,112,121,99,32,104,101,97,100,101,114,32,111, - 102,32,233,8,0,0,0,233,252,255,255,255,122,14,105,110, - 118,97,108,105,100,32,102,108,97,103,115,32,122,4,32,105, - 110,32,41,7,218,12,77,65,71,73,67,95,78,85,77,66, - 69,82,114,134,0,0,0,218,16,95,118,101,114,98,111,115, - 101,95,109,101,115,115,97,103,101,114,117,0,0,0,114,23, - 0,0,0,218,8,69,79,70,69,114,114,111,114,114,27,0, - 0,0,41,6,114,26,0,0,0,114,116,0,0,0,218,11, - 101,120,99,95,100,101,116,97,105,108,115,90,5,109,97,103, - 105,99,114,92,0,0,0,114,2,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,218,13,95,99,108, - 97,115,115,105,102,121,95,112,121,99,244,1,0,0,115,28, - 0,0,0,0,16,12,1,8,1,16,1,12,1,16,1,12, - 1,10,1,12,1,8,1,16,2,8,1,16,1,16,1,114, - 152,0,0,0,99,5,0,0,0,0,0,0,0,0,0,0, - 0,6,0,0,0,4,0,0,0,67,0,0,0,115,120,0, - 0,0,116,0,124,0,100,1,100,2,133,2,25,0,131,1, - 124,1,100,3,64,0,107,3,114,62,100,4,124,3,155,2, - 157,2,125,5,116,1,160,2,100,5,124,5,161,2,1,0, - 116,3,124,5,102,1,105,0,124,4,164,1,142,1,130,1, - 124,2,100,6,117,1,114,116,116,0,124,0,100,2,100,7, - 133,2,25,0,131,1,124,2,100,3,64,0,107,3,114,116, - 116,3,100,4,124,3,155,2,157,2,102,1,105,0,124,4, - 164,1,142,1,130,1,100,6,83,0,41,8,97,7,2,0, - 0,86,97,108,105,100,97,116,101,32,97,32,112,121,99,32, - 97,103,97,105,110,115,116,32,116,104,101,32,115,111,117,114, - 99,101,32,108,97,115,116,45,109,111,100,105,102,105,101,100, - 32,116,105,109,101,46,10,10,32,32,32,32,42,100,97,116, - 97,42,32,105,115,32,116,104,101,32,99,111,110,116,101,110, - 116,115,32,111,102,32,116,104,101,32,112,121,99,32,102,105, - 108,101,46,32,40,79,110,108,121,32,116,104,101,32,102,105, - 114,115,116,32,49,54,32,98,121,116,101,115,32,97,114,101, - 10,32,32,32,32,114,101,113,117,105,114,101,100,46,41,10, - 10,32,32,32,32,42,115,111,117,114,99,101,95,109,116,105, - 109,101,42,32,105,115,32,116,104,101,32,108,97,115,116,32, - 109,111,100,105,102,105,101,100,32,116,105,109,101,115,116,97, - 109,112,32,111,102,32,116,104,101,32,115,111,117,114,99,101, - 32,102,105,108,101,46,10,10,32,32,32,32,42,115,111,117, - 114,99,101,95,115,105,122,101,42,32,105,115,32,78,111,110, - 101,32,111,114,32,116,104,101,32,115,105,122,101,32,111,102, - 32,116,104,101,32,115,111,117,114,99,101,32,102,105,108,101, - 32,105,110,32,98,121,116,101,115,46,10,10,32,32,32,32, - 42,110,97,109,101,42,32,105,115,32,116,104,101,32,110,97, - 109,101,32,111,102,32,116,104,101,32,109,111,100,117,108,101, - 32,98,101,105,110,103,32,105,109,112,111,114,116,101,100,46, - 32,73,116,32,105,115,32,117,115,101,100,32,102,111,114,32, - 108,111,103,103,105,110,103,46,10,10,32,32,32,32,42,101, - 120,99,95,100,101,116,97,105,108,115,42,32,105,115,32,97, - 32,100,105,99,116,105,111,110,97,114,121,32,112,97,115,115, - 101,100,32,116,111,32,73,109,112,111,114,116,69,114,114,111, - 114,32,105,102,32,105,116,32,114,97,105,115,101,100,32,102, - 111,114,10,32,32,32,32,105,109,112,114,111,118,101,100,32, - 100,101,98,117,103,103,105,110,103,46,10,10,32,32,32,32, - 65,110,32,73,109,112,111,114,116,69,114,114,111,114,32,105, - 
115,32,114,97,105,115,101,100,32,105,102,32,116,104,101,32, - 98,121,116,101,99,111,100,101,32,105,115,32,115,116,97,108, - 101,46,10,10,32,32,32,32,114,146,0,0,0,233,12,0, - 0,0,114,15,0,0,0,122,22,98,121,116,101,99,111,100, - 101,32,105,115,32,115,116,97,108,101,32,102,111,114,32,114, - 144,0,0,0,78,114,145,0,0,0,41,4,114,27,0,0, - 0,114,134,0,0,0,114,149,0,0,0,114,117,0,0,0, - 41,6,114,26,0,0,0,218,12,115,111,117,114,99,101,95, - 109,116,105,109,101,218,11,115,111,117,114,99,101,95,115,105, - 122,101,114,116,0,0,0,114,151,0,0,0,114,92,0,0, - 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,23,95,118,97,108,105,100,97,116,101,95,116,105,109,101, - 115,116,97,109,112,95,112,121,99,21,2,0,0,115,16,0, - 0,0,0,19,24,1,10,1,12,1,16,1,8,1,22,255, - 2,2,114,156,0,0,0,99,4,0,0,0,0,0,0,0, - 0,0,0,0,4,0,0,0,4,0,0,0,67,0,0,0, - 115,42,0,0,0,124,0,100,1,100,2,133,2,25,0,124, - 1,107,3,114,38,116,0,100,3,124,2,155,2,157,2,102, - 1,105,0,124,3,164,1,142,1,130,1,100,4,83,0,41, - 5,97,243,1,0,0,86,97,108,105,100,97,116,101,32,97, - 32,104,97,115,104,45,98,97,115,101,100,32,112,121,99,32, - 98,121,32,99,104,101,99,107,105,110,103,32,116,104,101,32, - 114,101,97,108,32,115,111,117,114,99,101,32,104,97,115,104, - 32,97,103,97,105,110,115,116,32,116,104,101,32,111,110,101, - 32,105,110,10,32,32,32,32,116,104,101,32,112,121,99,32, - 104,101,97,100,101,114,46,10,10,32,32,32,32,42,100,97, - 116,97,42,32,105,115,32,116,104,101,32,99,111,110,116,101, - 110,116,115,32,111,102,32,116,104,101,32,112,121,99,32,102, - 105,108,101,46,32,40,79,110,108,121,32,116,104,101,32,102, - 105,114,115,116,32,49,54,32,98,121,116,101,115,32,97,114, - 101,10,32,32,32,32,114,101,113,117,105,114,101,100,46,41, - 10,10,32,32,32,32,42,115,111,117,114,99,101,95,104,97, - 115,104,42,32,105,115,32,116,104,101,32,105,109,112,111,114, - 116,108,105,98,46,117,116,105,108,46,115,111,117,114,99,101, - 95,104,97,115,104,40,41,32,111,102,32,116,104,101,32,115, - 111,117,114,99,101,32,102,105,108,101,46,10,10,32,32,32, - 32,42,110,97,109,101,42,32,105,115,32,116,104,101,32,110, - 97,109,101,32,111,102,32,116,104,101,32,109,111,100,117,108, - 101,32,98,101,105,110,103,32,105,109,112,111,114,116,101,100, - 46,32,73,116,32,105,115,32,117,115,101,100,32,102,111,114, - 32,108,111,103,103,105,110,103,46,10,10,32,32,32,32,42, - 101,120,99,95,100,101,116,97,105,108,115,42,32,105,115,32, - 97,32,100,105,99,116,105,111,110,97,114,121,32,112,97,115, - 115,101,100,32,116,111,32,73,109,112,111,114,116,69,114,114, - 111,114,32,105,102,32,105,116,32,114,97,105,115,101,100,32, - 102,111,114,10,32,32,32,32,105,109,112,114,111,118,101,100, - 32,100,101,98,117,103,103,105,110,103,46,10,10,32,32,32, - 32,65,110,32,73,109,112,111,114,116,69,114,114,111,114,32, - 105,115,32,114,97,105,115,101,100,32,105,102,32,116,104,101, - 32,98,121,116,101,99,111,100,101,32,105,115,32,115,116,97, - 108,101,46,10,10,32,32,32,32,114,146,0,0,0,114,145, - 0,0,0,122,46,104,97,115,104,32,105,110,32,98,121,116, - 101,99,111,100,101,32,100,111,101,115,110,39,116,32,109,97, - 116,99,104,32,104,97,115,104,32,111,102,32,115,111,117,114, - 99,101,32,78,41,1,114,117,0,0,0,41,4,114,26,0, - 0,0,218,11,115,111,117,114,99,101,95,104,97,115,104,114, - 116,0,0,0,114,151,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,218,18,95,118,97,108,105,100, - 97,116,101,95,104,97,115,104,95,112,121,99,49,2,0,0, - 115,12,0,0,0,0,17,16,1,2,1,8,255,4,2,2, - 254,114,158,0,0,0,99,4,0,0,0,0,0,0,0,0, - 0,0,0,5,0,0,0,5,0,0,0,67,0,0,0,115, - 80,0,0,0,116,0,160,1,124,0,161,1,125,4,116,2, - 
124,4,116,3,131,2,114,56,116,4,160,5,100,1,124,2, - 161,2,1,0,124,3,100,2,117,1,114,52,116,6,160,7, - 124,4,124,3,161,2,1,0,124,4,83,0,116,8,100,3, - 160,9,124,2,161,1,124,1,124,2,100,4,141,3,130,1, - 100,2,83,0,41,5,122,35,67,111,109,112,105,108,101,32, - 98,121,116,101,99,111,100,101,32,97,115,32,102,111,117,110, - 100,32,105,110,32,97,32,112,121,99,46,122,21,99,111,100, - 101,32,111,98,106,101,99,116,32,102,114,111,109,32,123,33, - 114,125,78,122,23,78,111,110,45,99,111,100,101,32,111,98, - 106,101,99,116,32,105,110,32,123,33,114,125,169,2,114,116, - 0,0,0,114,44,0,0,0,41,10,218,7,109,97,114,115, - 104,97,108,90,5,108,111,97,100,115,218,10,105,115,105,110, - 115,116,97,110,99,101,218,10,95,99,111,100,101,95,116,121, - 112,101,114,134,0,0,0,114,149,0,0,0,218,4,95,105, - 109,112,90,16,95,102,105,120,95,99,111,95,102,105,108,101, - 110,97,109,101,114,117,0,0,0,114,62,0,0,0,41,5, - 114,26,0,0,0,114,116,0,0,0,114,106,0,0,0,114, - 107,0,0,0,218,4,99,111,100,101,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,218,17,95,99,111,109,112, - 105,108,101,95,98,121,116,101,99,111,100,101,73,2,0,0, - 115,18,0,0,0,0,2,10,1,10,1,12,1,8,1,12, - 1,4,2,10,1,4,255,114,165,0,0,0,114,73,0,0, - 0,99,3,0,0,0,0,0,0,0,0,0,0,0,4,0, - 0,0,5,0,0,0,67,0,0,0,115,70,0,0,0,116, - 0,116,1,131,1,125,3,124,3,160,2,116,3,100,1,131, - 1,161,1,1,0,124,3,160,2,116,3,124,1,131,1,161, - 1,1,0,124,3,160,2,116,3,124,2,131,1,161,1,1, - 0,124,3,160,2,116,4,160,5,124,0,161,1,161,1,1, - 0,124,3,83,0,41,2,122,43,80,114,111,100,117,99,101, - 32,116,104,101,32,100,97,116,97,32,102,111,114,32,97,32, - 116,105,109,101,115,116,97,109,112,45,98,97,115,101,100,32, - 112,121,99,46,114,73,0,0,0,41,6,218,9,98,121,116, - 101,97,114,114,97,121,114,148,0,0,0,218,6,101,120,116, - 101,110,100,114,21,0,0,0,114,160,0,0,0,218,5,100, - 117,109,112,115,41,4,114,164,0,0,0,218,5,109,116,105, - 109,101,114,155,0,0,0,114,26,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,218,22,95,99,111, - 100,101,95,116,111,95,116,105,109,101,115,116,97,109,112,95, - 112,121,99,86,2,0,0,115,12,0,0,0,0,2,8,1, - 14,1,14,1,14,1,16,1,114,170,0,0,0,84,99,3, - 0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,5, - 0,0,0,67,0,0,0,115,80,0,0,0,116,0,116,1, - 131,1,125,3,100,1,124,2,100,1,62,0,66,0,125,4, - 124,3,160,2,116,3,124,4,131,1,161,1,1,0,116,4, - 124,1,131,1,100,2,107,2,115,50,74,0,130,1,124,3, - 160,2,124,1,161,1,1,0,124,3,160,2,116,5,160,6, - 124,0,161,1,161,1,1,0,124,3,83,0,41,3,122,38, - 80,114,111,100,117,99,101,32,116,104,101,32,100,97,116,97, - 32,102,111,114,32,97,32,104,97,115,104,45,98,97,115,101, - 100,32,112,121,99,46,114,39,0,0,0,114,146,0,0,0, - 41,7,114,166,0,0,0,114,148,0,0,0,114,167,0,0, - 0,114,21,0,0,0,114,23,0,0,0,114,160,0,0,0, - 114,168,0,0,0,41,5,114,164,0,0,0,114,157,0,0, - 0,90,7,99,104,101,99,107,101,100,114,26,0,0,0,114, + 1,16,1,4,1,4,1,12,1,8,1,8,1,2,255,8, + 2,10,1,8,1,16,1,10,1,16,1,10,1,4,1,2, + 255,8,2,16,1,10,1,16,2,14,1,114,102,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,5,0,0, + 0,9,0,0,0,67,0,0,0,115,124,0,0,0,116,0, + 124,0,131,1,100,1,107,2,114,16,100,2,83,0,124,0, + 160,1,100,3,161,1,92,3,125,1,125,2,125,3,124,1, + 114,56,124,3,160,2,161,0,100,4,100,5,133,2,25,0, + 100,6,107,3,114,60,124,0,83,0,122,12,116,3,124,0, + 131,1,125,4,87,0,110,34,4,0,116,4,116,5,102,2, + 121,106,1,0,1,0,1,0,124,0,100,2,100,5,133,2, + 25,0,125,4,89,0,110,2,48,0,116,6,124,4,131,1, + 114,120,124,4,83,0,124,0,83,0,41,7,122,188,67,111, + 110,118,101,114,116,32,97,32,98,121,116,101,99,111,100,101, + 32,102,105,108,101,32,112,97,116,104,32,116,111,32,97,32, + 
115,111,117,114,99,101,32,112,97,116,104,32,40,105,102,32, + 112,111,115,115,105,98,108,101,41,46,10,10,32,32,32,32, + 84,104,105,115,32,102,117,110,99,116,105,111,110,32,101,120, + 105,115,116,115,32,112,117,114,101,108,121,32,102,111,114,32, + 98,97,99,107,119,97,114,100,115,45,99,111,109,112,97,116, + 105,98,105,108,105,116,121,32,102,111,114,10,32,32,32,32, + 80,121,73,109,112,111,114,116,95,69,120,101,99,67,111,100, + 101,77,111,100,117,108,101,87,105,116,104,70,105,108,101,110, + 97,109,101,115,40,41,32,105,110,32,116,104,101,32,67,32, + 65,80,73,46,10,10,32,32,32,32,114,73,0,0,0,78, + 114,71,0,0,0,233,253,255,255,255,233,255,255,255,255,90, + 2,112,121,41,7,114,23,0,0,0,114,41,0,0,0,218, + 5,108,111,119,101,114,114,102,0,0,0,114,82,0,0,0, + 114,86,0,0,0,114,54,0,0,0,41,5,218,13,98,121, + 116,101,99,111,100,101,95,112,97,116,104,114,95,0,0,0, + 114,45,0,0,0,90,9,101,120,116,101,110,115,105,111,110, + 218,11,115,111,117,114,99,101,95,112,97,116,104,114,5,0, + 0,0,114,5,0,0,0,114,8,0,0,0,218,15,95,103, + 101,116,95,115,111,117,114,99,101,102,105,108,101,156,1,0, + 0,115,20,0,0,0,0,7,12,1,4,1,16,1,24,1, + 4,1,2,1,12,1,16,1,18,1,114,108,0,0,0,99, + 1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, + 8,0,0,0,67,0,0,0,115,72,0,0,0,124,0,160, + 0,116,1,116,2,131,1,161,1,114,46,122,10,116,3,124, + 0,131,1,87,0,83,0,4,0,116,4,121,42,1,0,1, + 0,1,0,89,0,113,68,48,0,110,22,124,0,160,0,116, + 1,116,5,131,1,161,1,114,64,124,0,83,0,100,0,83, + 0,100,0,83,0,169,1,78,41,6,218,8,101,110,100,115, + 119,105,116,104,218,5,116,117,112,108,101,114,101,0,0,0, + 114,97,0,0,0,114,82,0,0,0,114,88,0,0,0,41, + 1,114,96,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,11,95,103,101,116,95,99,97,99,104, + 101,100,175,1,0,0,115,16,0,0,0,0,1,14,1,2, + 1,10,1,12,1,8,1,14,1,4,2,114,112,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,8,0,0,0,67,0,0,0,115,50,0,0,0,122,14, + 116,0,124,0,131,1,106,1,125,1,87,0,110,22,4,0, + 116,2,121,36,1,0,1,0,1,0,100,1,125,1,89,0, + 110,2,48,0,124,1,100,2,79,0,125,1,124,1,83,0, + 41,3,122,51,67,97,108,99,117,108,97,116,101,32,116,104, + 101,32,109,111,100,101,32,112,101,114,109,105,115,115,105,111, + 110,115,32,102,111,114,32,97,32,98,121,116,101,99,111,100, + 101,32,102,105,108,101,46,114,60,0,0,0,233,128,0,0, + 0,41,3,114,49,0,0,0,114,51,0,0,0,114,50,0, + 0,0,41,2,114,44,0,0,0,114,52,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,10,95, + 99,97,108,99,95,109,111,100,101,187,1,0,0,115,12,0, + 0,0,0,2,2,1,14,1,12,1,10,3,8,1,114,114, + 0,0,0,99,1,0,0,0,0,0,0,0,0,0,0,0, + 3,0,0,0,8,0,0,0,3,0,0,0,115,66,0,0, + 0,100,6,135,0,102,1,100,2,100,3,132,9,125,1,122, + 10,116,0,106,1,125,2,87,0,110,26,4,0,116,2,121, + 50,1,0,1,0,1,0,100,4,100,5,132,0,125,2,89, + 0,110,2,48,0,124,2,124,1,136,0,131,2,1,0,124, + 1,83,0,41,7,122,252,68,101,99,111,114,97,116,111,114, + 32,116,111,32,118,101,114,105,102,121,32,116,104,97,116,32, + 116,104,101,32,109,111,100,117,108,101,32,98,101,105,110,103, + 32,114,101,113,117,101,115,116,101,100,32,109,97,116,99,104, + 101,115,32,116,104,101,32,111,110,101,32,116,104,101,10,32, + 32,32,32,108,111,97,100,101,114,32,99,97,110,32,104,97, + 110,100,108,101,46,10,10,32,32,32,32,84,104,101,32,102, + 105,114,115,116,32,97,114,103,117,109,101,110,116,32,40,115, + 101,108,102,41,32,109,117,115,116,32,100,101,102,105,110,101, + 32,95,110,97,109,101,32,119,104,105,99,104,32,116,104,101, + 32,115,101,99,111,110,100,32,97,114,103,117,109,101,110,116, + 32,105,115,10,32,32,32,32,99,111,109,112,97,114,101,100, + 32,97,103,97,105,110,115,116,46,32,73,102,32,116,104,101, + 
32,99,111,109,112,97,114,105,115,111,110,32,102,97,105,108, + 115,32,116,104,101,110,32,73,109,112,111,114,116,69,114,114, + 111,114,32,105,115,32,114,97,105,115,101,100,46,10,10,32, + 32,32,32,78,99,2,0,0,0,0,0,0,0,0,0,0, + 0,4,0,0,0,4,0,0,0,31,0,0,0,115,72,0, + 0,0,124,1,100,0,117,0,114,16,124,0,106,0,125,1, + 110,32,124,0,106,0,124,1,107,3,114,48,116,1,100,1, + 124,0,106,0,124,1,102,2,22,0,124,1,100,2,141,2, + 130,1,136,0,124,0,124,1,103,2,124,2,162,1,82,0, + 105,0,124,3,164,1,142,1,83,0,41,3,78,122,30,108, + 111,97,100,101,114,32,102,111,114,32,37,115,32,99,97,110, + 110,111,116,32,104,97,110,100,108,101,32,37,115,169,1,218, + 4,110,97,109,101,41,2,114,116,0,0,0,218,11,73,109, + 112,111,114,116,69,114,114,111,114,41,4,218,4,115,101,108, + 102,114,116,0,0,0,218,4,97,114,103,115,218,6,107,119, + 97,114,103,115,169,1,218,6,109,101,116,104,111,100,114,5, + 0,0,0,114,8,0,0,0,218,19,95,99,104,101,99,107, + 95,110,97,109,101,95,119,114,97,112,112,101,114,207,1,0, + 0,115,18,0,0,0,0,1,8,1,8,1,10,1,4,1, + 8,255,2,1,2,255,6,2,122,40,95,99,104,101,99,107, + 95,110,97,109,101,46,60,108,111,99,97,108,115,62,46,95, + 99,104,101,99,107,95,110,97,109,101,95,119,114,97,112,112, + 101,114,99,2,0,0,0,0,0,0,0,0,0,0,0,3, + 0,0,0,7,0,0,0,83,0,0,0,115,56,0,0,0, + 100,1,68,0,93,32,125,2,116,0,124,1,124,2,131,2, + 114,4,116,1,124,0,124,2,116,2,124,1,124,2,131,2, + 131,3,1,0,113,4,124,0,106,3,160,4,124,1,106,3, + 161,1,1,0,100,0,83,0,41,2,78,41,4,218,10,95, + 95,109,111,100,117,108,101,95,95,218,8,95,95,110,97,109, + 101,95,95,218,12,95,95,113,117,97,108,110,97,109,101,95, + 95,218,7,95,95,100,111,99,95,95,41,5,218,7,104,97, + 115,97,116,116,114,218,7,115,101,116,97,116,116,114,218,7, + 103,101,116,97,116,116,114,218,8,95,95,100,105,99,116,95, + 95,218,6,117,112,100,97,116,101,41,3,90,3,110,101,119, + 90,3,111,108,100,114,67,0,0,0,114,5,0,0,0,114, + 5,0,0,0,114,8,0,0,0,218,5,95,119,114,97,112, + 218,1,0,0,115,8,0,0,0,0,1,8,1,10,1,20, + 1,122,26,95,99,104,101,99,107,95,110,97,109,101,46,60, + 108,111,99,97,108,115,62,46,95,119,114,97,112,41,1,78, + 41,3,218,10,95,98,111,111,116,115,116,114,97,112,114,133, + 0,0,0,218,9,78,97,109,101,69,114,114,111,114,41,3, + 114,122,0,0,0,114,123,0,0,0,114,133,0,0,0,114, + 5,0,0,0,114,121,0,0,0,114,8,0,0,0,218,11, + 95,99,104,101,99,107,95,110,97,109,101,199,1,0,0,115, + 14,0,0,0,0,8,14,7,2,1,10,1,12,2,14,5, + 10,1,114,136,0,0,0,99,2,0,0,0,0,0,0,0, + 0,0,0,0,5,0,0,0,6,0,0,0,67,0,0,0, + 115,60,0,0,0,124,0,160,0,124,1,161,1,92,2,125, + 2,125,3,124,2,100,1,117,0,114,56,116,1,124,3,131, + 1,114,56,100,2,125,4,116,2,160,3,124,4,160,4,124, + 3,100,3,25,0,161,1,116,5,161,2,1,0,124,2,83, + 0,41,4,122,155,84,114,121,32,116,111,32,102,105,110,100, + 32,97,32,108,111,97,100,101,114,32,102,111,114,32,116,104, + 101,32,115,112,101,99,105,102,105,101,100,32,109,111,100,117, + 108,101,32,98,121,32,100,101,108,101,103,97,116,105,110,103, + 32,116,111,10,32,32,32,32,115,101,108,102,46,102,105,110, + 100,95,108,111,97,100,101,114,40,41,46,10,10,32,32,32, + 32,84,104,105,115,32,109,101,116,104,111,100,32,105,115,32, + 100,101,112,114,101,99,97,116,101,100,32,105,110,32,102,97, + 118,111,114,32,111,102,32,102,105,110,100,101,114,46,102,105, + 110,100,95,115,112,101,99,40,41,46,10,10,32,32,32,32, + 78,122,44,78,111,116,32,105,109,112,111,114,116,105,110,103, + 32,100,105,114,101,99,116,111,114,121,32,123,125,58,32,109, + 105,115,115,105,110,103,32,95,95,105,110,105,116,95,95,114, + 73,0,0,0,41,6,218,11,102,105,110,100,95,108,111,97, + 100,101,114,114,23,0,0,0,114,75,0,0,0,114,76,0, + 
0,0,114,62,0,0,0,218,13,73,109,112,111,114,116,87, + 97,114,110,105,110,103,41,5,114,118,0,0,0,218,8,102, + 117,108,108,110,97,109,101,218,6,108,111,97,100,101,114,218, + 8,112,111,114,116,105,111,110,115,218,3,109,115,103,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,17,95, + 102,105,110,100,95,109,111,100,117,108,101,95,115,104,105,109, + 227,1,0,0,115,10,0,0,0,0,10,14,1,16,1,4, + 1,22,1,114,143,0,0,0,99,3,0,0,0,0,0,0, + 0,0,0,0,0,6,0,0,0,4,0,0,0,67,0,0, + 0,115,166,0,0,0,124,0,100,1,100,2,133,2,25,0, + 125,3,124,3,116,0,107,3,114,64,100,3,124,1,155,2, + 100,4,124,3,155,2,157,4,125,4,116,1,160,2,100,5, + 124,4,161,2,1,0,116,3,124,4,102,1,105,0,124,2, + 164,1,142,1,130,1,116,4,124,0,131,1,100,6,107,0, + 114,106,100,7,124,1,155,2,157,2,125,4,116,1,160,2, + 100,5,124,4,161,2,1,0,116,5,124,4,131,1,130,1, + 116,6,124,0,100,2,100,8,133,2,25,0,131,1,125,5, + 124,5,100,9,64,0,114,162,100,10,124,5,155,2,100,11, + 124,1,155,2,157,4,125,4,116,3,124,4,102,1,105,0, + 124,2,164,1,142,1,130,1,124,5,83,0,41,12,97,84, + 2,0,0,80,101,114,102,111,114,109,32,98,97,115,105,99, + 32,118,97,108,105,100,105,116,121,32,99,104,101,99,107,105, + 110,103,32,111,102,32,97,32,112,121,99,32,104,101,97,100, + 101,114,32,97,110,100,32,114,101,116,117,114,110,32,116,104, + 101,32,102,108,97,103,115,32,102,105,101,108,100,44,10,32, + 32,32,32,119,104,105,99,104,32,100,101,116,101,114,109,105, + 110,101,115,32,104,111,119,32,116,104,101,32,112,121,99,32, + 115,104,111,117,108,100,32,98,101,32,102,117,114,116,104,101, + 114,32,118,97,108,105,100,97,116,101,100,32,97,103,97,105, + 110,115,116,32,116,104,101,32,115,111,117,114,99,101,46,10, + 10,32,32,32,32,42,100,97,116,97,42,32,105,115,32,116, + 104,101,32,99,111,110,116,101,110,116,115,32,111,102,32,116, + 104,101,32,112,121,99,32,102,105,108,101,46,32,40,79,110, + 108,121,32,116,104,101,32,102,105,114,115,116,32,49,54,32, + 98,121,116,101,115,32,97,114,101,10,32,32,32,32,114,101, + 113,117,105,114,101,100,44,32,116,104,111,117,103,104,46,41, + 10,10,32,32,32,32,42,110,97,109,101,42,32,105,115,32, + 116,104,101,32,110,97,109,101,32,111,102,32,116,104,101,32, + 109,111,100,117,108,101,32,98,101,105,110,103,32,105,109,112, + 111,114,116,101,100,46,32,73,116,32,105,115,32,117,115,101, + 100,32,102,111,114,32,108,111,103,103,105,110,103,46,10,10, + 32,32,32,32,42,101,120,99,95,100,101,116,97,105,108,115, + 42,32,105,115,32,97,32,100,105,99,116,105,111,110,97,114, + 121,32,112,97,115,115,101,100,32,116,111,32,73,109,112,111, + 114,116,69,114,114,111,114,32,105,102,32,105,116,32,114,97, + 105,115,101,100,32,102,111,114,10,32,32,32,32,105,109,112, + 114,111,118,101,100,32,100,101,98,117,103,103,105,110,103,46, + 10,10,32,32,32,32,73,109,112,111,114,116,69,114,114,111, + 114,32,105,115,32,114,97,105,115,101,100,32,119,104,101,110, + 32,116,104,101,32,109,97,103,105,99,32,110,117,109,98,101, + 114,32,105,115,32,105,110,99,111,114,114,101,99,116,32,111, + 114,32,119,104,101,110,32,116,104,101,32,102,108,97,103,115, + 10,32,32,32,32,102,105,101,108,100,32,105,115,32,105,110, + 118,97,108,105,100,46,32,69,79,70,69,114,114,111,114,32, + 105,115,32,114,97,105,115,101,100,32,119,104,101,110,32,116, + 104,101,32,100,97,116,97,32,105,115,32,102,111,117,110,100, + 32,116,111,32,98,101,32,116,114,117,110,99,97,116,101,100, + 46,10,10,32,32,32,32,78,114,16,0,0,0,122,20,98, + 97,100,32,109,97,103,105,99,32,110,117,109,98,101,114,32, + 105,110,32,122,2,58,32,250,2,123,125,233,16,0,0,0, + 122,40,114,101,97,99,104,101,100,32,69,79,70,32,119,104, + 
105,108,101,32,114,101,97,100,105,110,103,32,112,121,99,32, + 104,101,97,100,101,114,32,111,102,32,233,8,0,0,0,233, + 252,255,255,255,122,14,105,110,118,97,108,105,100,32,102,108, + 97,103,115,32,122,4,32,105,110,32,41,7,218,12,77,65, + 71,73,67,95,78,85,77,66,69,82,114,134,0,0,0,218, + 16,95,118,101,114,98,111,115,101,95,109,101,115,115,97,103, + 101,114,117,0,0,0,114,23,0,0,0,218,8,69,79,70, + 69,114,114,111,114,114,27,0,0,0,41,6,114,26,0,0, + 0,114,116,0,0,0,218,11,101,120,99,95,100,101,116,97, + 105,108,115,90,5,109,97,103,105,99,114,92,0,0,0,114, 2,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,17,95,99,111,100,101,95,116,111,95,104,97, - 115,104,95,112,121,99,96,2,0,0,115,14,0,0,0,0, - 2,8,1,12,1,14,1,16,1,10,1,16,1,114,171,0, - 0,0,99,1,0,0,0,0,0,0,0,0,0,0,0,5, - 0,0,0,6,0,0,0,67,0,0,0,115,62,0,0,0, - 100,1,100,2,108,0,125,1,116,1,160,2,124,0,161,1, - 106,3,125,2,124,1,160,4,124,2,161,1,125,3,116,1, - 160,5,100,2,100,3,161,2,125,4,124,4,160,6,124,0, - 160,6,124,3,100,1,25,0,161,1,161,1,83,0,41,4, - 122,121,68,101,99,111,100,101,32,98,121,116,101,115,32,114, - 101,112,114,101,115,101,110,116,105,110,103,32,115,111,117,114, - 99,101,32,99,111,100,101,32,97,110,100,32,114,101,116,117, - 114,110,32,116,104,101,32,115,116,114,105,110,103,46,10,10, - 32,32,32,32,85,110,105,118,101,114,115,97,108,32,110,101, - 119,108,105,110,101,32,115,117,112,112,111,114,116,32,105,115, - 32,117,115,101,100,32,105,110,32,116,104,101,32,100,101,99, - 111,100,105,110,103,46,10,32,32,32,32,114,73,0,0,0, - 78,84,41,7,218,8,116,111,107,101,110,105,122,101,114,64, - 0,0,0,90,7,66,121,116,101,115,73,79,90,8,114,101, - 97,100,108,105,110,101,90,15,100,101,116,101,99,116,95,101, - 110,99,111,100,105,110,103,90,25,73,110,99,114,101,109,101, - 110,116,97,108,78,101,119,108,105,110,101,68,101,99,111,100, - 101,114,218,6,100,101,99,111,100,101,41,5,218,12,115,111, - 117,114,99,101,95,98,121,116,101,115,114,172,0,0,0,90, - 21,115,111,117,114,99,101,95,98,121,116,101,115,95,114,101, - 97,100,108,105,110,101,218,8,101,110,99,111,100,105,110,103, - 90,15,110,101,119,108,105,110,101,95,100,101,99,111,100,101, - 114,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,13,100,101,99,111,100,101,95,115,111,117,114,99,101,107, - 2,0,0,115,10,0,0,0,0,5,8,1,12,1,10,1, - 12,1,114,176,0,0,0,169,2,114,140,0,0,0,218,26, - 115,117,98,109,111,100,117,108,101,95,115,101,97,114,99,104, - 95,108,111,99,97,116,105,111,110,115,99,2,0,0,0,0, - 0,0,0,2,0,0,0,9,0,0,0,8,0,0,0,67, - 0,0,0,115,12,1,0,0,124,1,100,1,117,0,114,58, - 100,2,125,1,116,0,124,2,100,3,131,2,114,68,122,14, - 124,2,160,1,124,0,161,1,125,1,87,0,113,68,4,0, - 116,2,121,54,1,0,1,0,1,0,89,0,113,68,48,0, - 110,10,116,3,160,4,124,1,161,1,125,1,116,5,106,6, - 124,0,124,2,124,1,100,4,141,3,125,4,100,5,124,4, - 95,7,124,2,100,1,117,0,114,152,116,8,131,0,68,0, - 93,42,92,2,125,5,125,6,124,1,160,9,116,10,124,6, - 131,1,161,1,114,104,124,5,124,0,124,1,131,2,125,2, - 124,2,124,4,95,11,1,0,113,152,113,104,100,1,83,0, - 124,3,116,12,117,0,114,216,116,0,124,2,100,6,131,2, - 114,222,122,14,124,2,160,13,124,0,161,1,125,7,87,0, - 110,18,4,0,116,2,121,202,1,0,1,0,1,0,89,0, - 113,222,48,0,124,7,114,222,103,0,124,4,95,14,110,6, - 124,3,124,4,95,14,124,4,106,14,103,0,107,2,144,1, - 114,8,124,1,144,1,114,8,116,15,124,1,131,1,100,7, - 25,0,125,8,124,4,106,14,160,16,124,8,161,1,1,0, - 124,4,83,0,41,8,97,61,1,0,0,82,101,116,117,114, - 110,32,97,32,109,111,100,117,108,101,32,115,112,101,99,32, - 98,97,115,101,100,32,111,110,32,97,32,102,105,108,101,32, - 
108,111,99,97,116,105,111,110,46,10,10,32,32,32,32,84, - 111,32,105,110,100,105,99,97,116,101,32,116,104,97,116,32, - 116,104,101,32,109,111,100,117,108,101,32,105,115,32,97,32, - 112,97,99,107,97,103,101,44,32,115,101,116,10,32,32,32, - 32,115,117,98,109,111,100,117,108,101,95,115,101,97,114,99, - 104,95,108,111,99,97,116,105,111,110,115,32,116,111,32,97, - 32,108,105,115,116,32,111,102,32,100,105,114,101,99,116,111, - 114,121,32,112,97,116,104,115,46,32,32,65,110,10,32,32, - 32,32,101,109,112,116,121,32,108,105,115,116,32,105,115,32, - 115,117,102,102,105,99,105,101,110,116,44,32,116,104,111,117, - 103,104,32,105,116,115,32,110,111,116,32,111,116,104,101,114, - 119,105,115,101,32,117,115,101,102,117,108,32,116,111,32,116, - 104,101,10,32,32,32,32,105,109,112,111,114,116,32,115,121, - 115,116,101,109,46,10,10,32,32,32,32,84,104,101,32,108, - 111,97,100,101,114,32,109,117,115,116,32,116,97,107,101,32, - 97,32,115,112,101,99,32,97,115,32,105,116,115,32,111,110, - 108,121,32,95,95,105,110,105,116,95,95,40,41,32,97,114, - 103,46,10,10,32,32,32,32,78,122,9,60,117,110,107,110, - 111,119,110,62,218,12,103,101,116,95,102,105,108,101,110,97, - 109,101,169,1,218,6,111,114,105,103,105,110,84,218,10,105, - 115,95,112,97,99,107,97,103,101,114,73,0,0,0,41,17, - 114,128,0,0,0,114,179,0,0,0,114,117,0,0,0,114, - 4,0,0,0,114,79,0,0,0,114,134,0,0,0,218,10, - 77,111,100,117,108,101,83,112,101,99,90,13,95,115,101,116, - 95,102,105,108,101,97,116,116,114,218,27,95,103,101,116,95, - 115,117,112,112,111,114,116,101,100,95,102,105,108,101,95,108, - 111,97,100,101,114,115,114,110,0,0,0,114,111,0,0,0, - 114,140,0,0,0,218,9,95,80,79,80,85,76,65,84,69, - 114,182,0,0,0,114,178,0,0,0,114,47,0,0,0,218, - 6,97,112,112,101,110,100,41,9,114,116,0,0,0,90,8, - 108,111,99,97,116,105,111,110,114,140,0,0,0,114,178,0, - 0,0,218,4,115,112,101,99,218,12,108,111,97,100,101,114, - 95,99,108,97,115,115,218,8,115,117,102,102,105,120,101,115, - 114,182,0,0,0,90,7,100,105,114,110,97,109,101,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,23,115, - 112,101,99,95,102,114,111,109,95,102,105,108,101,95,108,111, - 99,97,116,105,111,110,124,2,0,0,115,62,0,0,0,0, - 12,8,4,4,1,10,2,2,1,14,1,12,1,8,2,10, - 8,16,1,6,3,8,1,14,1,14,1,10,1,6,1,6, - 2,4,3,8,2,10,1,2,1,14,1,12,1,6,2,4, - 1,8,2,6,1,12,1,6,1,12,1,12,2,114,190,0, - 0,0,99,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,4,0,0,0,64,0,0,0,115,80,0,0,0, - 101,0,90,1,100,0,90,2,100,1,90,3,100,2,90,4, - 100,3,90,5,100,4,90,6,101,7,100,5,100,6,132,0, - 131,1,90,8,101,7,100,7,100,8,132,0,131,1,90,9, - 101,7,100,14,100,10,100,11,132,1,131,1,90,10,101,7, - 100,15,100,12,100,13,132,1,131,1,90,11,100,9,83,0, - 41,16,218,21,87,105,110,100,111,119,115,82,101,103,105,115, - 116,114,121,70,105,110,100,101,114,122,62,77,101,116,97,32, - 112,97,116,104,32,102,105,110,100,101,114,32,102,111,114,32, - 109,111,100,117,108,101,115,32,100,101,99,108,97,114,101,100, - 32,105,110,32,116,104,101,32,87,105,110,100,111,119,115,32, - 114,101,103,105,115,116,114,121,46,122,59,83,111,102,116,119, - 97,114,101,92,80,121,116,104,111,110,92,80,121,116,104,111, - 110,67,111,114,101,92,123,115,121,115,95,118,101,114,115,105, - 111,110,125,92,77,111,100,117,108,101,115,92,123,102,117,108, - 108,110,97,109,101,125,122,65,83,111,102,116,119,97,114,101, - 92,80,121,116,104,111,110,92,80,121,116,104,111,110,67,111, - 114,101,92,123,115,121,115,95,118,101,114,115,105,111,110,125, - 92,77,111,100,117,108,101,115,92,123,102,117,108,108,110,97, - 109,101,125,92,68,101,98,117,103,70,99,2,0,0,0,0, - 0,0,0,0,0,0,0,2,0,0,0,8,0,0,0,67, - 
0,0,0,115,54,0,0,0,122,16,116,0,160,1,116,0, - 106,2,124,1,161,2,87,0,83,0,4,0,116,3,121,48, - 1,0,1,0,1,0,116,0,160,1,116,0,106,4,124,1, - 161,2,6,0,89,0,83,0,48,0,100,0,83,0,114,109, - 0,0,0,41,5,218,6,119,105,110,114,101,103,90,7,79, - 112,101,110,75,101,121,90,17,72,75,69,89,95,67,85,82, - 82,69,78,84,95,85,83,69,82,114,50,0,0,0,90,18, - 72,75,69,89,95,76,79,67,65,76,95,77,65,67,72,73, - 78,69,41,2,218,3,99,108,115,114,7,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,14,95, - 111,112,101,110,95,114,101,103,105,115,116,114,121,204,2,0, - 0,115,8,0,0,0,0,2,2,1,16,1,12,1,122,36, - 87,105,110,100,111,119,115,82,101,103,105,115,116,114,121,70, - 105,110,100,101,114,46,95,111,112,101,110,95,114,101,103,105, - 115,116,114,121,99,2,0,0,0,0,0,0,0,0,0,0, - 0,6,0,0,0,8,0,0,0,67,0,0,0,115,132,0, - 0,0,124,0,106,0,114,14,124,0,106,1,125,2,110,6, - 124,0,106,2,125,2,124,2,106,3,124,1,100,1,116,4, - 106,5,100,0,100,2,133,2,25,0,22,0,100,3,141,2, - 125,3,122,58,124,0,160,6,124,3,161,1,143,28,125,4, - 116,7,160,8,124,4,100,4,161,2,125,5,87,0,100,0, - 4,0,4,0,131,3,1,0,110,16,49,0,115,94,48,0, - 1,0,1,0,1,0,89,0,1,0,87,0,110,20,4,0, - 116,9,121,126,1,0,1,0,1,0,89,0,100,0,83,0, - 48,0,124,5,83,0,41,5,78,122,5,37,100,46,37,100, - 114,28,0,0,0,41,2,114,139,0,0,0,90,11,115,121, - 115,95,118,101,114,115,105,111,110,114,40,0,0,0,41,10, - 218,11,68,69,66,85,71,95,66,85,73,76,68,218,18,82, - 69,71,73,83,84,82,89,95,75,69,89,95,68,69,66,85, - 71,218,12,82,69,71,73,83,84,82,89,95,75,69,89,114, - 62,0,0,0,114,1,0,0,0,218,12,118,101,114,115,105, - 111,110,95,105,110,102,111,114,194,0,0,0,114,192,0,0, - 0,90,10,81,117,101,114,121,86,97,108,117,101,114,50,0, - 0,0,41,6,114,193,0,0,0,114,139,0,0,0,90,12, - 114,101,103,105,115,116,114,121,95,107,101,121,114,7,0,0, - 0,90,4,104,107,101,121,218,8,102,105,108,101,112,97,116, - 104,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,16,95,115,101,97,114,99,104,95,114,101,103,105,115,116, - 114,121,211,2,0,0,115,24,0,0,0,0,2,6,1,8, - 2,6,1,6,1,16,255,6,2,2,1,12,1,46,1,12, - 1,8,1,122,38,87,105,110,100,111,119,115,82,101,103,105, - 115,116,114,121,70,105,110,100,101,114,46,95,115,101,97,114, - 99,104,95,114,101,103,105,115,116,114,121,78,99,4,0,0, - 0,0,0,0,0,0,0,0,0,8,0,0,0,8,0,0, - 0,67,0,0,0,115,120,0,0,0,124,0,160,0,124,1, - 161,1,125,4,124,4,100,0,117,0,114,22,100,0,83,0, - 122,12,116,1,124,4,131,1,1,0,87,0,110,20,4,0, - 116,2,121,54,1,0,1,0,1,0,89,0,100,0,83,0, - 48,0,116,3,131,0,68,0,93,52,92,2,125,5,125,6, - 124,4,160,4,116,5,124,6,131,1,161,1,114,62,116,6, - 106,7,124,1,124,5,124,1,124,4,131,2,124,4,100,1, - 141,3,125,7,124,7,2,0,1,0,83,0,113,62,100,0, - 83,0,41,2,78,114,180,0,0,0,41,8,114,200,0,0, - 0,114,49,0,0,0,114,50,0,0,0,114,184,0,0,0, - 114,110,0,0,0,114,111,0,0,0,114,134,0,0,0,218, - 16,115,112,101,99,95,102,114,111,109,95,108,111,97,100,101, - 114,41,8,114,193,0,0,0,114,139,0,0,0,114,44,0, - 0,0,218,6,116,97,114,103,101,116,114,199,0,0,0,114, - 140,0,0,0,114,189,0,0,0,114,187,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,9,102, - 105,110,100,95,115,112,101,99,226,2,0,0,115,28,0,0, - 0,0,2,10,1,8,1,4,1,2,1,12,1,12,1,8, - 1,14,1,14,1,6,1,8,1,2,254,6,3,122,31,87, - 105,110,100,111,119,115,82,101,103,105,115,116,114,121,70,105, - 110,100,101,114,46,102,105,110,100,95,115,112,101,99,99,3, - 0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,4, - 0,0,0,67,0,0,0,115,34,0,0,0,124,0,160,0, - 124,1,124,2,161,2,125,3,124,3,100,1,117,1,114,26, - 124,3,106,1,83,0,100,1,83,0,100,1,83,0,41,2, - 122,108,70,105,110,100,32,109,111,100,117,108,101,32,110,97, - 
109,101,100,32,105,110,32,116,104,101,32,114,101,103,105,115, - 116,114,121,46,10,10,32,32,32,32,32,32,32,32,84,104, - 105,115,32,109,101,116,104,111,100,32,105,115,32,100,101,112, - 114,101,99,97,116,101,100,46,32,32,85,115,101,32,101,120, - 101,99,95,109,111,100,117,108,101,40,41,32,105,110,115,116, - 101,97,100,46,10,10,32,32,32,32,32,32,32,32,78,169, - 2,114,203,0,0,0,114,140,0,0,0,169,4,114,193,0, - 0,0,114,139,0,0,0,114,44,0,0,0,114,187,0,0, + 0,0,0,218,13,95,99,108,97,115,115,105,102,121,95,112, + 121,99,244,1,0,0,115,28,0,0,0,0,16,12,1,8, + 1,16,1,12,1,16,1,12,1,10,1,12,1,8,1,16, + 2,8,1,16,1,16,1,114,152,0,0,0,99,5,0,0, + 0,0,0,0,0,0,0,0,0,6,0,0,0,4,0,0, + 0,67,0,0,0,115,120,0,0,0,116,0,124,0,100,1, + 100,2,133,2,25,0,131,1,124,1,100,3,64,0,107,3, + 114,62,100,4,124,3,155,2,157,2,125,5,116,1,160,2, + 100,5,124,5,161,2,1,0,116,3,124,5,102,1,105,0, + 124,4,164,1,142,1,130,1,124,2,100,6,117,1,114,116, + 116,0,124,0,100,2,100,7,133,2,25,0,131,1,124,2, + 100,3,64,0,107,3,114,116,116,3,100,4,124,3,155,2, + 157,2,102,1,105,0,124,4,164,1,142,1,130,1,100,6, + 83,0,41,8,97,7,2,0,0,86,97,108,105,100,97,116, + 101,32,97,32,112,121,99,32,97,103,97,105,110,115,116,32, + 116,104,101,32,115,111,117,114,99,101,32,108,97,115,116,45, + 109,111,100,105,102,105,101,100,32,116,105,109,101,46,10,10, + 32,32,32,32,42,100,97,116,97,42,32,105,115,32,116,104, + 101,32,99,111,110,116,101,110,116,115,32,111,102,32,116,104, + 101,32,112,121,99,32,102,105,108,101,46,32,40,79,110,108, + 121,32,116,104,101,32,102,105,114,115,116,32,49,54,32,98, + 121,116,101,115,32,97,114,101,10,32,32,32,32,114,101,113, + 117,105,114,101,100,46,41,10,10,32,32,32,32,42,115,111, + 117,114,99,101,95,109,116,105,109,101,42,32,105,115,32,116, + 104,101,32,108,97,115,116,32,109,111,100,105,102,105,101,100, + 32,116,105,109,101,115,116,97,109,112,32,111,102,32,116,104, + 101,32,115,111,117,114,99,101,32,102,105,108,101,46,10,10, + 32,32,32,32,42,115,111,117,114,99,101,95,115,105,122,101, + 42,32,105,115,32,78,111,110,101,32,111,114,32,116,104,101, + 32,115,105,122,101,32,111,102,32,116,104,101,32,115,111,117, + 114,99,101,32,102,105,108,101,32,105,110,32,98,121,116,101, + 115,46,10,10,32,32,32,32,42,110,97,109,101,42,32,105, + 115,32,116,104,101,32,110,97,109,101,32,111,102,32,116,104, + 101,32,109,111,100,117,108,101,32,98,101,105,110,103,32,105, + 109,112,111,114,116,101,100,46,32,73,116,32,105,115,32,117, + 115,101,100,32,102,111,114,32,108,111,103,103,105,110,103,46, + 10,10,32,32,32,32,42,101,120,99,95,100,101,116,97,105, + 108,115,42,32,105,115,32,97,32,100,105,99,116,105,111,110, + 97,114,121,32,112,97,115,115,101,100,32,116,111,32,73,109, + 112,111,114,116,69,114,114,111,114,32,105,102,32,105,116,32, + 114,97,105,115,101,100,32,102,111,114,10,32,32,32,32,105, + 109,112,114,111,118,101,100,32,100,101,98,117,103,103,105,110, + 103,46,10,10,32,32,32,32,65,110,32,73,109,112,111,114, + 116,69,114,114,111,114,32,105,115,32,114,97,105,115,101,100, + 32,105,102,32,116,104,101,32,98,121,116,101,99,111,100,101, + 32,105,115,32,115,116,97,108,101,46,10,10,32,32,32,32, + 114,146,0,0,0,233,12,0,0,0,114,15,0,0,0,122, + 22,98,121,116,101,99,111,100,101,32,105,115,32,115,116,97, + 108,101,32,102,111,114,32,114,144,0,0,0,78,114,145,0, + 0,0,41,4,114,27,0,0,0,114,134,0,0,0,114,149, + 0,0,0,114,117,0,0,0,41,6,114,26,0,0,0,218, + 12,115,111,117,114,99,101,95,109,116,105,109,101,218,11,115, + 111,117,114,99,101,95,115,105,122,101,114,116,0,0,0,114, + 151,0,0,0,114,92,0,0,0,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,218,23,95,118,97,108,105,100, + 
97,116,101,95,116,105,109,101,115,116,97,109,112,95,112,121, + 99,21,2,0,0,115,16,0,0,0,0,19,24,1,10,1, + 12,1,16,1,8,1,22,255,2,2,114,156,0,0,0,99, + 4,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0, + 4,0,0,0,67,0,0,0,115,42,0,0,0,124,0,100, + 1,100,2,133,2,25,0,124,1,107,3,114,38,116,0,100, + 3,124,2,155,2,157,2,102,1,105,0,124,3,164,1,142, + 1,130,1,100,4,83,0,41,5,97,243,1,0,0,86,97, + 108,105,100,97,116,101,32,97,32,104,97,115,104,45,98,97, + 115,101,100,32,112,121,99,32,98,121,32,99,104,101,99,107, + 105,110,103,32,116,104,101,32,114,101,97,108,32,115,111,117, + 114,99,101,32,104,97,115,104,32,97,103,97,105,110,115,116, + 32,116,104,101,32,111,110,101,32,105,110,10,32,32,32,32, + 116,104,101,32,112,121,99,32,104,101,97,100,101,114,46,10, + 10,32,32,32,32,42,100,97,116,97,42,32,105,115,32,116, + 104,101,32,99,111,110,116,101,110,116,115,32,111,102,32,116, + 104,101,32,112,121,99,32,102,105,108,101,46,32,40,79,110, + 108,121,32,116,104,101,32,102,105,114,115,116,32,49,54,32, + 98,121,116,101,115,32,97,114,101,10,32,32,32,32,114,101, + 113,117,105,114,101,100,46,41,10,10,32,32,32,32,42,115, + 111,117,114,99,101,95,104,97,115,104,42,32,105,115,32,116, + 104,101,32,105,109,112,111,114,116,108,105,98,46,117,116,105, + 108,46,115,111,117,114,99,101,95,104,97,115,104,40,41,32, + 111,102,32,116,104,101,32,115,111,117,114,99,101,32,102,105, + 108,101,46,10,10,32,32,32,32,42,110,97,109,101,42,32, + 105,115,32,116,104,101,32,110,97,109,101,32,111,102,32,116, + 104,101,32,109,111,100,117,108,101,32,98,101,105,110,103,32, + 105,109,112,111,114,116,101,100,46,32,73,116,32,105,115,32, + 117,115,101,100,32,102,111,114,32,108,111,103,103,105,110,103, + 46,10,10,32,32,32,32,42,101,120,99,95,100,101,116,97, + 105,108,115,42,32,105,115,32,97,32,100,105,99,116,105,111, + 110,97,114,121,32,112,97,115,115,101,100,32,116,111,32,73, + 109,112,111,114,116,69,114,114,111,114,32,105,102,32,105,116, + 32,114,97,105,115,101,100,32,102,111,114,10,32,32,32,32, + 105,109,112,114,111,118,101,100,32,100,101,98,117,103,103,105, + 110,103,46,10,10,32,32,32,32,65,110,32,73,109,112,111, + 114,116,69,114,114,111,114,32,105,115,32,114,97,105,115,101, + 100,32,105,102,32,116,104,101,32,98,121,116,101,99,111,100, + 101,32,105,115,32,115,116,97,108,101,46,10,10,32,32,32, + 32,114,146,0,0,0,114,145,0,0,0,122,46,104,97,115, + 104,32,105,110,32,98,121,116,101,99,111,100,101,32,100,111, + 101,115,110,39,116,32,109,97,116,99,104,32,104,97,115,104, + 32,111,102,32,115,111,117,114,99,101,32,78,41,1,114,117, + 0,0,0,41,4,114,26,0,0,0,218,11,115,111,117,114, + 99,101,95,104,97,115,104,114,116,0,0,0,114,151,0,0, 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,11,102,105,110,100,95,109,111,100,117,108,101,242,2,0, - 0,115,8,0,0,0,0,7,12,1,8,1,6,2,122,33, - 87,105,110,100,111,119,115,82,101,103,105,115,116,114,121,70, - 105,110,100,101,114,46,102,105,110,100,95,109,111,100,117,108, - 101,41,2,78,78,41,1,78,41,12,114,125,0,0,0,114, - 124,0,0,0,114,126,0,0,0,114,127,0,0,0,114,197, - 0,0,0,114,196,0,0,0,114,195,0,0,0,218,11,99, - 108,97,115,115,109,101,116,104,111,100,114,194,0,0,0,114, - 200,0,0,0,114,203,0,0,0,114,206,0,0,0,114,5, + 218,18,95,118,97,108,105,100,97,116,101,95,104,97,115,104, + 95,112,121,99,49,2,0,0,115,12,0,0,0,0,17,16, + 1,2,1,8,255,4,2,2,254,114,158,0,0,0,99,4, + 0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,5, + 0,0,0,67,0,0,0,115,80,0,0,0,116,0,160,1, + 124,0,161,1,125,4,116,2,124,4,116,3,131,2,114,56, + 116,4,160,5,100,1,124,2,161,2,1,0,124,3,100,2, + 117,1,114,52,116,6,160,7,124,4,124,3,161,2,1,0, + 124,4,83,0,116,8,100,3,160,9,124,2,161,1,124,1, + 
124,2,100,4,141,3,130,1,100,2,83,0,41,5,122,35, + 67,111,109,112,105,108,101,32,98,121,116,101,99,111,100,101, + 32,97,115,32,102,111,117,110,100,32,105,110,32,97,32,112, + 121,99,46,122,21,99,111,100,101,32,111,98,106,101,99,116, + 32,102,114,111,109,32,123,33,114,125,78,122,23,78,111,110, + 45,99,111,100,101,32,111,98,106,101,99,116,32,105,110,32, + 123,33,114,125,169,2,114,116,0,0,0,114,44,0,0,0, + 41,10,218,7,109,97,114,115,104,97,108,90,5,108,111,97, + 100,115,218,10,105,115,105,110,115,116,97,110,99,101,218,10, + 95,99,111,100,101,95,116,121,112,101,114,134,0,0,0,114, + 149,0,0,0,218,4,95,105,109,112,90,16,95,102,105,120, + 95,99,111,95,102,105,108,101,110,97,109,101,114,117,0,0, + 0,114,62,0,0,0,41,5,114,26,0,0,0,114,116,0, + 0,0,114,106,0,0,0,114,107,0,0,0,218,4,99,111, + 100,101,114,5,0,0,0,114,5,0,0,0,114,8,0,0, + 0,218,17,95,99,111,109,112,105,108,101,95,98,121,116,101, + 99,111,100,101,73,2,0,0,115,18,0,0,0,0,2,10, + 1,10,1,12,1,8,1,12,1,4,2,10,1,4,255,114, + 165,0,0,0,114,73,0,0,0,99,3,0,0,0,0,0, + 0,0,0,0,0,0,4,0,0,0,5,0,0,0,67,0, + 0,0,115,70,0,0,0,116,0,116,1,131,1,125,3,124, + 3,160,2,116,3,100,1,131,1,161,1,1,0,124,3,160, + 2,116,3,124,1,131,1,161,1,1,0,124,3,160,2,116, + 3,124,2,131,1,161,1,1,0,124,3,160,2,116,4,160, + 5,124,0,161,1,161,1,1,0,124,3,83,0,41,2,122, + 43,80,114,111,100,117,99,101,32,116,104,101,32,100,97,116, + 97,32,102,111,114,32,97,32,116,105,109,101,115,116,97,109, + 112,45,98,97,115,101,100,32,112,121,99,46,114,73,0,0, + 0,41,6,218,9,98,121,116,101,97,114,114,97,121,114,148, + 0,0,0,218,6,101,120,116,101,110,100,114,21,0,0,0, + 114,160,0,0,0,218,5,100,117,109,112,115,41,4,114,164, + 0,0,0,218,5,109,116,105,109,101,114,155,0,0,0,114, + 26,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, + 0,0,0,218,22,95,99,111,100,101,95,116,111,95,116,105, + 109,101,115,116,97,109,112,95,112,121,99,86,2,0,0,115, + 12,0,0,0,0,2,8,1,14,1,14,1,14,1,16,1, + 114,170,0,0,0,84,99,3,0,0,0,0,0,0,0,0, + 0,0,0,5,0,0,0,5,0,0,0,67,0,0,0,115, + 80,0,0,0,116,0,116,1,131,1,125,3,100,1,124,2, + 100,1,62,0,66,0,125,4,124,3,160,2,116,3,124,4, + 131,1,161,1,1,0,116,4,124,1,131,1,100,2,107,2, + 115,50,74,0,130,1,124,3,160,2,124,1,161,1,1,0, + 124,3,160,2,116,5,160,6,124,0,161,1,161,1,1,0, + 124,3,83,0,41,3,122,38,80,114,111,100,117,99,101,32, + 116,104,101,32,100,97,116,97,32,102,111,114,32,97,32,104, + 97,115,104,45,98,97,115,101,100,32,112,121,99,46,114,39, + 0,0,0,114,146,0,0,0,41,7,114,166,0,0,0,114, + 148,0,0,0,114,167,0,0,0,114,21,0,0,0,114,23, + 0,0,0,114,160,0,0,0,114,168,0,0,0,41,5,114, + 164,0,0,0,114,157,0,0,0,90,7,99,104,101,99,107, + 101,100,114,26,0,0,0,114,2,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,17,95,99,111, + 100,101,95,116,111,95,104,97,115,104,95,112,121,99,96,2, + 0,0,115,14,0,0,0,0,2,8,1,12,1,14,1,16, + 1,10,1,16,1,114,171,0,0,0,99,1,0,0,0,0, + 0,0,0,0,0,0,0,5,0,0,0,6,0,0,0,67, + 0,0,0,115,62,0,0,0,100,1,100,2,108,0,125,1, + 116,1,160,2,124,0,161,1,106,3,125,2,124,1,160,4, + 124,2,161,1,125,3,116,1,160,5,100,2,100,3,161,2, + 125,4,124,4,160,6,124,0,160,6,124,3,100,1,25,0, + 161,1,161,1,83,0,41,4,122,121,68,101,99,111,100,101, + 32,98,121,116,101,115,32,114,101,112,114,101,115,101,110,116, + 105,110,103,32,115,111,117,114,99,101,32,99,111,100,101,32, + 97,110,100,32,114,101,116,117,114,110,32,116,104,101,32,115, + 116,114,105,110,103,46,10,10,32,32,32,32,85,110,105,118, + 101,114,115,97,108,32,110,101,119,108,105,110,101,32,115,117, + 112,112,111,114,116,32,105,115,32,117,115,101,100,32,105,110, + 32,116,104,101,32,100,101,99,111,100,105,110,103,46,10,32, + 
32,32,32,114,73,0,0,0,78,84,41,7,218,8,116,111, + 107,101,110,105,122,101,114,64,0,0,0,90,7,66,121,116, + 101,115,73,79,90,8,114,101,97,100,108,105,110,101,90,15, + 100,101,116,101,99,116,95,101,110,99,111,100,105,110,103,90, + 25,73,110,99,114,101,109,101,110,116,97,108,78,101,119,108, + 105,110,101,68,101,99,111,100,101,114,218,6,100,101,99,111, + 100,101,41,5,218,12,115,111,117,114,99,101,95,98,121,116, + 101,115,114,172,0,0,0,90,21,115,111,117,114,99,101,95, + 98,121,116,101,115,95,114,101,97,100,108,105,110,101,218,8, + 101,110,99,111,100,105,110,103,90,15,110,101,119,108,105,110, + 101,95,100,101,99,111,100,101,114,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,218,13,100,101,99,111,100,101, + 95,115,111,117,114,99,101,107,2,0,0,115,10,0,0,0, + 0,5,8,1,12,1,10,1,12,1,114,176,0,0,0,169, + 2,114,140,0,0,0,218,26,115,117,98,109,111,100,117,108, + 101,95,115,101,97,114,99,104,95,108,111,99,97,116,105,111, + 110,115,99,2,0,0,0,0,0,0,0,2,0,0,0,9, + 0,0,0,8,0,0,0,67,0,0,0,115,12,1,0,0, + 124,1,100,1,117,0,114,58,100,2,125,1,116,0,124,2, + 100,3,131,2,114,68,122,14,124,2,160,1,124,0,161,1, + 125,1,87,0,113,68,4,0,116,2,121,54,1,0,1,0, + 1,0,89,0,113,68,48,0,110,10,116,3,160,4,124,1, + 161,1,125,1,116,5,106,6,124,0,124,2,124,1,100,4, + 141,3,125,4,100,5,124,4,95,7,124,2,100,1,117,0, + 114,152,116,8,131,0,68,0,93,42,92,2,125,5,125,6, + 124,1,160,9,116,10,124,6,131,1,161,1,114,104,124,5, + 124,0,124,1,131,2,125,2,124,2,124,4,95,11,1,0, + 113,152,113,104,100,1,83,0,124,3,116,12,117,0,114,216, + 116,0,124,2,100,6,131,2,114,222,122,14,124,2,160,13, + 124,0,161,1,125,7,87,0,110,18,4,0,116,2,121,202, + 1,0,1,0,1,0,89,0,113,222,48,0,124,7,114,222, + 103,0,124,4,95,14,110,6,124,3,124,4,95,14,124,4, + 106,14,103,0,107,2,144,1,114,8,124,1,144,1,114,8, + 116,15,124,1,131,1,100,7,25,0,125,8,124,4,106,14, + 160,16,124,8,161,1,1,0,124,4,83,0,41,8,97,61, + 1,0,0,82,101,116,117,114,110,32,97,32,109,111,100,117, + 108,101,32,115,112,101,99,32,98,97,115,101,100,32,111,110, + 32,97,32,102,105,108,101,32,108,111,99,97,116,105,111,110, + 46,10,10,32,32,32,32,84,111,32,105,110,100,105,99,97, + 116,101,32,116,104,97,116,32,116,104,101,32,109,111,100,117, + 108,101,32,105,115,32,97,32,112,97,99,107,97,103,101,44, + 32,115,101,116,10,32,32,32,32,115,117,98,109,111,100,117, + 108,101,95,115,101,97,114,99,104,95,108,111,99,97,116,105, + 111,110,115,32,116,111,32,97,32,108,105,115,116,32,111,102, + 32,100,105,114,101,99,116,111,114,121,32,112,97,116,104,115, + 46,32,32,65,110,10,32,32,32,32,101,109,112,116,121,32, + 108,105,115,116,32,105,115,32,115,117,102,102,105,99,105,101, + 110,116,44,32,116,104,111,117,103,104,32,105,116,115,32,110, + 111,116,32,111,116,104,101,114,119,105,115,101,32,117,115,101, + 102,117,108,32,116,111,32,116,104,101,10,32,32,32,32,105, + 109,112,111,114,116,32,115,121,115,116,101,109,46,10,10,32, + 32,32,32,84,104,101,32,108,111,97,100,101,114,32,109,117, + 115,116,32,116,97,107,101,32,97,32,115,112,101,99,32,97, + 115,32,105,116,115,32,111,110,108,121,32,95,95,105,110,105, + 116,95,95,40,41,32,97,114,103,46,10,10,32,32,32,32, + 78,122,9,60,117,110,107,110,111,119,110,62,218,12,103,101, + 116,95,102,105,108,101,110,97,109,101,169,1,218,6,111,114, + 105,103,105,110,84,218,10,105,115,95,112,97,99,107,97,103, + 101,114,73,0,0,0,41,17,114,128,0,0,0,114,179,0, + 0,0,114,117,0,0,0,114,4,0,0,0,114,79,0,0, + 0,114,134,0,0,0,218,10,77,111,100,117,108,101,83,112, + 101,99,90,13,95,115,101,116,95,102,105,108,101,97,116,116, + 114,218,27,95,103,101,116,95,115,117,112,112,111,114,116,101, + 
100,95,102,105,108,101,95,108,111,97,100,101,114,115,114,110, + 0,0,0,114,111,0,0,0,114,140,0,0,0,218,9,95, + 80,79,80,85,76,65,84,69,114,182,0,0,0,114,178,0, + 0,0,114,47,0,0,0,218,6,97,112,112,101,110,100,41, + 9,114,116,0,0,0,90,8,108,111,99,97,116,105,111,110, + 114,140,0,0,0,114,178,0,0,0,218,4,115,112,101,99, + 218,12,108,111,97,100,101,114,95,99,108,97,115,115,218,8, + 115,117,102,102,105,120,101,115,114,182,0,0,0,90,7,100, + 105,114,110,97,109,101,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,23,115,112,101,99,95,102,114,111,109, + 95,102,105,108,101,95,108,111,99,97,116,105,111,110,124,2, + 0,0,115,62,0,0,0,0,12,8,4,4,1,10,2,2, + 1,14,1,12,1,8,2,10,8,16,1,6,3,8,1,14, + 1,14,1,10,1,6,1,6,2,4,3,8,2,10,1,2, + 1,14,1,12,1,6,2,4,1,8,2,6,1,12,1,6, + 1,12,1,12,2,114,190,0,0,0,99,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,64, + 0,0,0,115,80,0,0,0,101,0,90,1,100,0,90,2, + 100,1,90,3,100,2,90,4,100,3,90,5,100,4,90,6, + 101,7,100,5,100,6,132,0,131,1,90,8,101,7,100,7, + 100,8,132,0,131,1,90,9,101,7,100,14,100,10,100,11, + 132,1,131,1,90,10,101,7,100,15,100,12,100,13,132,1, + 131,1,90,11,100,9,83,0,41,16,218,21,87,105,110,100, + 111,119,115,82,101,103,105,115,116,114,121,70,105,110,100,101, + 114,122,62,77,101,116,97,32,112,97,116,104,32,102,105,110, + 100,101,114,32,102,111,114,32,109,111,100,117,108,101,115,32, + 100,101,99,108,97,114,101,100,32,105,110,32,116,104,101,32, + 87,105,110,100,111,119,115,32,114,101,103,105,115,116,114,121, + 46,122,59,83,111,102,116,119,97,114,101,92,80,121,116,104, + 111,110,92,80,121,116,104,111,110,67,111,114,101,92,123,115, + 121,115,95,118,101,114,115,105,111,110,125,92,77,111,100,117, + 108,101,115,92,123,102,117,108,108,110,97,109,101,125,122,65, + 83,111,102,116,119,97,114,101,92,80,121,116,104,111,110,92, + 80,121,116,104,111,110,67,111,114,101,92,123,115,121,115,95, + 118,101,114,115,105,111,110,125,92,77,111,100,117,108,101,115, + 92,123,102,117,108,108,110,97,109,101,125,92,68,101,98,117, + 103,70,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,8,0,0,0,67,0,0,0,115,54,0,0,0, + 122,16,116,0,160,1,116,0,106,2,124,1,161,2,87,0, + 83,0,4,0,116,3,121,48,1,0,1,0,1,0,116,0, + 160,1,116,0,106,4,124,1,161,2,6,0,89,0,83,0, + 48,0,100,0,83,0,114,109,0,0,0,41,5,218,6,119, + 105,110,114,101,103,90,7,79,112,101,110,75,101,121,90,17, + 72,75,69,89,95,67,85,82,82,69,78,84,95,85,83,69, + 82,114,50,0,0,0,90,18,72,75,69,89,95,76,79,67, + 65,76,95,77,65,67,72,73,78,69,41,2,218,3,99,108, + 115,114,7,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,14,95,111,112,101,110,95,114,101,103, + 105,115,116,114,121,204,2,0,0,115,8,0,0,0,0,2, + 2,1,16,1,12,1,122,36,87,105,110,100,111,119,115,82, + 101,103,105,115,116,114,121,70,105,110,100,101,114,46,95,111, + 112,101,110,95,114,101,103,105,115,116,114,121,99,2,0,0, + 0,0,0,0,0,0,0,0,0,6,0,0,0,8,0,0, + 0,67,0,0,0,115,132,0,0,0,124,0,106,0,114,14, + 124,0,106,1,125,2,110,6,124,0,106,2,125,2,124,2, + 106,3,124,1,100,1,116,4,106,5,100,0,100,2,133,2, + 25,0,22,0,100,3,141,2,125,3,122,58,124,0,160,6, + 124,3,161,1,143,28,125,4,116,7,160,8,124,4,100,4, + 161,2,125,5,87,0,100,0,4,0,4,0,131,3,1,0, + 110,16,49,0,115,94,48,0,1,0,1,0,1,0,89,0, + 1,0,87,0,110,20,4,0,116,9,121,126,1,0,1,0, + 1,0,89,0,100,0,83,0,48,0,124,5,83,0,41,5, + 78,122,5,37,100,46,37,100,114,28,0,0,0,41,2,114, + 139,0,0,0,90,11,115,121,115,95,118,101,114,115,105,111, + 110,114,40,0,0,0,41,10,218,11,68,69,66,85,71,95, + 66,85,73,76,68,218,18,82,69,71,73,83,84,82,89,95, + 75,69,89,95,68,69,66,85,71,218,12,82,69,71,73,83, + 84,82,89,95,75,69,89,114,62,0,0,0,114,1,0,0, + 
0,218,12,118,101,114,115,105,111,110,95,105,110,102,111,114, + 194,0,0,0,114,192,0,0,0,90,10,81,117,101,114,121, + 86,97,108,117,101,114,50,0,0,0,41,6,114,193,0,0, + 0,114,139,0,0,0,90,12,114,101,103,105,115,116,114,121, + 95,107,101,121,114,7,0,0,0,90,4,104,107,101,121,218, + 8,102,105,108,101,112,97,116,104,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,218,16,95,115,101,97,114,99, + 104,95,114,101,103,105,115,116,114,121,211,2,0,0,115,24, + 0,0,0,0,2,6,1,8,2,6,1,6,1,16,255,6, + 2,2,1,12,1,46,1,12,1,8,1,122,38,87,105,110, + 100,111,119,115,82,101,103,105,115,116,114,121,70,105,110,100, + 101,114,46,95,115,101,97,114,99,104,95,114,101,103,105,115, + 116,114,121,78,99,4,0,0,0,0,0,0,0,0,0,0, + 0,8,0,0,0,8,0,0,0,67,0,0,0,115,120,0, + 0,0,124,0,160,0,124,1,161,1,125,4,124,4,100,0, + 117,0,114,22,100,0,83,0,122,12,116,1,124,4,131,1, + 1,0,87,0,110,20,4,0,116,2,121,54,1,0,1,0, + 1,0,89,0,100,0,83,0,48,0,116,3,131,0,68,0, + 93,52,92,2,125,5,125,6,124,4,160,4,116,5,124,6, + 131,1,161,1,114,62,116,6,106,7,124,1,124,5,124,1, + 124,4,131,2,124,4,100,1,141,3,125,7,124,7,2,0, + 1,0,83,0,113,62,100,0,83,0,41,2,78,114,180,0, + 0,0,41,8,114,200,0,0,0,114,49,0,0,0,114,50, + 0,0,0,114,184,0,0,0,114,110,0,0,0,114,111,0, + 0,0,114,134,0,0,0,218,16,115,112,101,99,95,102,114, + 111,109,95,108,111,97,100,101,114,41,8,114,193,0,0,0, + 114,139,0,0,0,114,44,0,0,0,218,6,116,97,114,103, + 101,116,114,199,0,0,0,114,140,0,0,0,114,189,0,0, + 0,114,187,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,9,102,105,110,100,95,115,112,101,99, + 226,2,0,0,115,28,0,0,0,0,2,10,1,8,1,4, + 1,2,1,12,1,12,1,8,1,14,1,14,1,6,1,8, + 1,2,254,6,3,122,31,87,105,110,100,111,119,115,82,101, + 103,105,115,116,114,121,70,105,110,100,101,114,46,102,105,110, + 100,95,115,112,101,99,99,3,0,0,0,0,0,0,0,0, + 0,0,0,4,0,0,0,4,0,0,0,67,0,0,0,115, + 34,0,0,0,124,0,160,0,124,1,124,2,161,2,125,3, + 124,3,100,1,117,1,114,26,124,3,106,1,83,0,100,1, + 83,0,100,1,83,0,41,2,122,108,70,105,110,100,32,109, + 111,100,117,108,101,32,110,97,109,101,100,32,105,110,32,116, + 104,101,32,114,101,103,105,115,116,114,121,46,10,10,32,32, + 32,32,32,32,32,32,84,104,105,115,32,109,101,116,104,111, + 100,32,105,115,32,100,101,112,114,101,99,97,116,101,100,46, + 32,32,85,115,101,32,101,120,101,99,95,109,111,100,117,108, + 101,40,41,32,105,110,115,116,101,97,100,46,10,10,32,32, + 32,32,32,32,32,32,78,169,2,114,203,0,0,0,114,140, + 0,0,0,169,4,114,193,0,0,0,114,139,0,0,0,114, + 44,0,0,0,114,187,0,0,0,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,218,11,102,105,110,100,95,109, + 111,100,117,108,101,242,2,0,0,115,8,0,0,0,0,7, + 12,1,8,1,6,2,122,33,87,105,110,100,111,119,115,82, + 101,103,105,115,116,114,121,70,105,110,100,101,114,46,102,105, + 110,100,95,109,111,100,117,108,101,41,2,78,78,41,1,78, + 41,12,114,125,0,0,0,114,124,0,0,0,114,126,0,0, + 0,114,127,0,0,0,114,197,0,0,0,114,196,0,0,0, + 114,195,0,0,0,218,11,99,108,97,115,115,109,101,116,104, + 111,100,114,194,0,0,0,114,200,0,0,0,114,203,0,0, + 0,114,206,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,5,0,0,0,114,8,0,0,0,114,191,0,0,0,192, + 2,0,0,115,28,0,0,0,8,2,4,3,2,255,2,4, + 2,255,2,3,4,2,2,1,10,6,2,1,10,14,2,1, + 12,15,2,1,114,191,0,0,0,99,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,2,0,0,0,64,0, + 0,0,115,48,0,0,0,101,0,90,1,100,0,90,2,100, + 1,90,3,100,2,100,3,132,0,90,4,100,4,100,5,132, + 0,90,5,100,6,100,7,132,0,90,6,100,8,100,9,132, + 0,90,7,100,10,83,0,41,11,218,13,95,76,111,97,100, + 101,114,66,97,115,105,99,115,122,83,66,97,115,101,32,99, + 108,97,115,115,32,111,102,32,99,111,109,109,111,110,32,99, + 
111,100,101,32,110,101,101,100,101,100,32,98,121,32,98,111, + 116,104,32,83,111,117,114,99,101,76,111,97,100,101,114,32, + 97,110,100,10,32,32,32,32,83,111,117,114,99,101,108,101, + 115,115,70,105,108,101,76,111,97,100,101,114,46,99,2,0, + 0,0,0,0,0,0,0,0,0,0,5,0,0,0,4,0, + 0,0,67,0,0,0,115,64,0,0,0,116,0,124,0,160, + 1,124,1,161,1,131,1,100,1,25,0,125,2,124,2,160, + 2,100,2,100,1,161,2,100,3,25,0,125,3,124,1,160, + 3,100,2,161,1,100,4,25,0,125,4,124,3,100,5,107, + 2,111,62,124,4,100,5,107,3,83,0,41,6,122,141,67, + 111,110,99,114,101,116,101,32,105,109,112,108,101,109,101,110, + 116,97,116,105,111,110,32,111,102,32,73,110,115,112,101,99, + 116,76,111,97,100,101,114,46,105,115,95,112,97,99,107,97, + 103,101,32,98,121,32,99,104,101,99,107,105,110,103,32,105, + 102,10,32,32,32,32,32,32,32,32,116,104,101,32,112,97, + 116,104,32,114,101,116,117,114,110,101,100,32,98,121,32,103, + 101,116,95,102,105,108,101,110,97,109,101,32,104,97,115,32, + 97,32,102,105,108,101,110,97,109,101,32,111,102,32,39,95, + 95,105,110,105,116,95,95,46,112,121,39,46,114,39,0,0, + 0,114,71,0,0,0,114,73,0,0,0,114,28,0,0,0, + 218,8,95,95,105,110,105,116,95,95,41,4,114,47,0,0, + 0,114,179,0,0,0,114,43,0,0,0,114,41,0,0,0, + 41,5,114,118,0,0,0,114,139,0,0,0,114,96,0,0, + 0,90,13,102,105,108,101,110,97,109,101,95,98,97,115,101, + 90,9,116,97,105,108,95,110,97,109,101,114,5,0,0,0, + 114,5,0,0,0,114,8,0,0,0,114,182,0,0,0,5, + 3,0,0,115,8,0,0,0,0,3,18,1,16,1,14,1, + 122,24,95,76,111,97,100,101,114,66,97,115,105,99,115,46, + 105,115,95,112,97,99,107,97,103,101,99,2,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,67, + 0,0,0,115,4,0,0,0,100,1,83,0,169,2,122,42, + 85,115,101,32,100,101,102,97,117,108,116,32,115,101,109,97, + 110,116,105,99,115,32,102,111,114,32,109,111,100,117,108,101, + 32,99,114,101,97,116,105,111,110,46,78,114,5,0,0,0, + 169,2,114,118,0,0,0,114,187,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,13,99,114,101, + 97,116,101,95,109,111,100,117,108,101,13,3,0,0,115,2, + 0,0,0,0,1,122,27,95,76,111,97,100,101,114,66,97, + 115,105,99,115,46,99,114,101,97,116,101,95,109,111,100,117, + 108,101,99,2,0,0,0,0,0,0,0,0,0,0,0,3, + 0,0,0,5,0,0,0,67,0,0,0,115,56,0,0,0, + 124,0,160,0,124,1,106,1,161,1,125,2,124,2,100,1, + 117,0,114,36,116,2,100,2,160,3,124,1,106,1,161,1, + 131,1,130,1,116,4,160,5,116,6,124,2,124,1,106,7, + 161,3,1,0,100,1,83,0,41,3,122,19,69,120,101,99, + 117,116,101,32,116,104,101,32,109,111,100,117,108,101,46,78, + 122,52,99,97,110,110,111,116,32,108,111,97,100,32,109,111, + 100,117,108,101,32,123,33,114,125,32,119,104,101,110,32,103, + 101,116,95,99,111,100,101,40,41,32,114,101,116,117,114,110, + 115,32,78,111,110,101,41,8,218,8,103,101,116,95,99,111, + 100,101,114,125,0,0,0,114,117,0,0,0,114,62,0,0, + 0,114,134,0,0,0,218,25,95,99,97,108,108,95,119,105, + 116,104,95,102,114,97,109,101,115,95,114,101,109,111,118,101, + 100,218,4,101,120,101,99,114,131,0,0,0,41,3,114,118, + 0,0,0,218,6,109,111,100,117,108,101,114,164,0,0,0, + 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, + 11,101,120,101,99,95,109,111,100,117,108,101,16,3,0,0, + 115,12,0,0,0,0,2,12,1,8,1,6,1,4,255,6, + 2,122,25,95,76,111,97,100,101,114,66,97,115,105,99,115, + 46,101,120,101,99,95,109,111,100,117,108,101,99,2,0,0, + 0,0,0,0,0,0,0,0,0,2,0,0,0,4,0,0, + 0,67,0,0,0,115,12,0,0,0,116,0,160,1,124,0, + 124,1,161,2,83,0,41,1,122,26,84,104,105,115,32,109, + 111,100,117,108,101,32,105,115,32,100,101,112,114,101,99,97, + 116,101,100,46,41,2,114,134,0,0,0,218,17,95,108,111, + 97,100,95,109,111,100,117,108,101,95,115,104,105,109,169,2, + 
114,118,0,0,0,114,139,0,0,0,114,5,0,0,0,114, + 5,0,0,0,114,8,0,0,0,218,11,108,111,97,100,95, + 109,111,100,117,108,101,24,3,0,0,115,2,0,0,0,0, + 2,122,25,95,76,111,97,100,101,114,66,97,115,105,99,115, + 46,108,111,97,100,95,109,111,100,117,108,101,78,41,8,114, + 125,0,0,0,114,124,0,0,0,114,126,0,0,0,114,127, + 0,0,0,114,182,0,0,0,114,212,0,0,0,114,217,0, + 0,0,114,220,0,0,0,114,5,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,114,208,0,0,0, + 0,3,0,0,115,10,0,0,0,8,2,4,3,8,8,8, + 3,8,8,114,208,0,0,0,99,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, + 0,115,74,0,0,0,101,0,90,1,100,0,90,2,100,1, + 100,2,132,0,90,3,100,3,100,4,132,0,90,4,100,5, + 100,6,132,0,90,5,100,7,100,8,132,0,90,6,100,9, + 100,10,132,0,90,7,100,11,100,12,156,1,100,13,100,14, + 132,2,90,8,100,15,100,16,132,0,90,9,100,17,83,0, + 41,18,218,12,83,111,117,114,99,101,76,111,97,100,101,114, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,1,0,0,0,67,0,0,0,115,8,0,0,0,116,0, + 130,1,100,1,83,0,41,2,122,165,79,112,116,105,111,110, + 97,108,32,109,101,116,104,111,100,32,116,104,97,116,32,114, + 101,116,117,114,110,115,32,116,104,101,32,109,111,100,105,102, + 105,99,97,116,105,111,110,32,116,105,109,101,32,40,97,110, + 32,105,110,116,41,32,102,111,114,32,116,104,101,10,32,32, + 32,32,32,32,32,32,115,112,101,99,105,102,105,101,100,32, + 112,97,116,104,32,40,97,32,115,116,114,41,46,10,10,32, + 32,32,32,32,32,32,32,82,97,105,115,101,115,32,79,83, + 69,114,114,111,114,32,119,104,101,110,32,116,104,101,32,112, + 97,116,104,32,99,97,110,110,111,116,32,98,101,32,104,97, + 110,100,108,101,100,46,10,32,32,32,32,32,32,32,32,78, + 41,1,114,50,0,0,0,169,2,114,118,0,0,0,114,44, 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,114,191,0,0,0,192,2,0,0,115,28,0,0,0, - 8,2,4,3,2,255,2,4,2,255,2,3,4,2,2,1, - 10,6,2,1,10,14,2,1,12,15,2,1,114,191,0,0, - 0,99,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,2,0,0,0,64,0,0,0,115,48,0,0,0,101, - 0,90,1,100,0,90,2,100,1,90,3,100,2,100,3,132, - 0,90,4,100,4,100,5,132,0,90,5,100,6,100,7,132, - 0,90,6,100,8,100,9,132,0,90,7,100,10,83,0,41, - 11,218,13,95,76,111,97,100,101,114,66,97,115,105,99,115, - 122,83,66,97,115,101,32,99,108,97,115,115,32,111,102,32, - 99,111,109,109,111,110,32,99,111,100,101,32,110,101,101,100, - 101,100,32,98,121,32,98,111,116,104,32,83,111,117,114,99, - 101,76,111,97,100,101,114,32,97,110,100,10,32,32,32,32, - 83,111,117,114,99,101,108,101,115,115,70,105,108,101,76,111, - 97,100,101,114,46,99,2,0,0,0,0,0,0,0,0,0, - 0,0,5,0,0,0,4,0,0,0,67,0,0,0,115,64, - 0,0,0,116,0,124,0,160,1,124,1,161,1,131,1,100, - 1,25,0,125,2,124,2,160,2,100,2,100,1,161,2,100, - 3,25,0,125,3,124,1,160,3,100,2,161,1,100,4,25, - 0,125,4,124,3,100,5,107,2,111,62,124,4,100,5,107, - 3,83,0,41,6,122,141,67,111,110,99,114,101,116,101,32, - 105,109,112,108,101,109,101,110,116,97,116,105,111,110,32,111, - 102,32,73,110,115,112,101,99,116,76,111,97,100,101,114,46, - 105,115,95,112,97,99,107,97,103,101,32,98,121,32,99,104, - 101,99,107,105,110,103,32,105,102,10,32,32,32,32,32,32, - 32,32,116,104,101,32,112,97,116,104,32,114,101,116,117,114, - 110,101,100,32,98,121,32,103,101,116,95,102,105,108,101,110, - 97,109,101,32,104,97,115,32,97,32,102,105,108,101,110,97, - 109,101,32,111,102,32,39,95,95,105,110,105,116,95,95,46, - 112,121,39,46,114,39,0,0,0,114,71,0,0,0,114,73, - 0,0,0,114,28,0,0,0,218,8,95,95,105,110,105,116, - 95,95,41,4,114,47,0,0,0,114,179,0,0,0,114,43, - 0,0,0,114,41,0,0,0,41,5,114,118,0,0,0,114, - 139,0,0,0,114,96,0,0,0,90,13,102,105,108,101,110, - 97,109,101,95,98,97,115,101,90,9,116,97,105,108,95,110, - 
97,109,101,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,114,182,0,0,0,5,3,0,0,115,8,0,0,0, - 0,3,18,1,16,1,14,1,122,24,95,76,111,97,100,101, - 114,66,97,115,105,99,115,46,105,115,95,112,97,99,107,97, - 103,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, - 100,1,83,0,169,2,122,42,85,115,101,32,100,101,102,97, - 117,108,116,32,115,101,109,97,110,116,105,99,115,32,102,111, - 114,32,109,111,100,117,108,101,32,99,114,101,97,116,105,111, - 110,46,78,114,5,0,0,0,169,2,114,118,0,0,0,114, - 187,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,13,99,114,101,97,116,101,95,109,111,100,117, - 108,101,13,3,0,0,115,2,0,0,0,0,1,122,27,95, - 76,111,97,100,101,114,66,97,115,105,99,115,46,99,114,101, - 97,116,101,95,109,111,100,117,108,101,99,2,0,0,0,0, - 0,0,0,0,0,0,0,3,0,0,0,5,0,0,0,67, - 0,0,0,115,56,0,0,0,124,0,160,0,124,1,106,1, - 161,1,125,2,124,2,100,1,117,0,114,36,116,2,100,2, - 160,3,124,1,106,1,161,1,131,1,130,1,116,4,160,5, - 116,6,124,2,124,1,106,7,161,3,1,0,100,1,83,0, - 41,3,122,19,69,120,101,99,117,116,101,32,116,104,101,32, - 109,111,100,117,108,101,46,78,122,52,99,97,110,110,111,116, - 32,108,111,97,100,32,109,111,100,117,108,101,32,123,33,114, - 125,32,119,104,101,110,32,103,101,116,95,99,111,100,101,40, - 41,32,114,101,116,117,114,110,115,32,78,111,110,101,41,8, - 218,8,103,101,116,95,99,111,100,101,114,125,0,0,0,114, - 117,0,0,0,114,62,0,0,0,114,134,0,0,0,218,25, - 95,99,97,108,108,95,119,105,116,104,95,102,114,97,109,101, - 115,95,114,101,109,111,118,101,100,218,4,101,120,101,99,114, - 131,0,0,0,41,3,114,118,0,0,0,218,6,109,111,100, - 117,108,101,114,164,0,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,218,11,101,120,101,99,95,109,111, - 100,117,108,101,16,3,0,0,115,12,0,0,0,0,2,12, - 1,8,1,6,1,4,255,6,2,122,25,95,76,111,97,100, - 101,114,66,97,115,105,99,115,46,101,120,101,99,95,109,111, - 100,117,108,101,99,2,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,4,0,0,0,67,0,0,0,115,12,0, - 0,0,116,0,160,1,124,0,124,1,161,2,83,0,41,1, - 122,26,84,104,105,115,32,109,111,100,117,108,101,32,105,115, - 32,100,101,112,114,101,99,97,116,101,100,46,41,2,114,134, - 0,0,0,218,17,95,108,111,97,100,95,109,111,100,117,108, - 101,95,115,104,105,109,169,2,114,118,0,0,0,114,139,0, - 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, - 0,218,11,108,111,97,100,95,109,111,100,117,108,101,24,3, - 0,0,115,2,0,0,0,0,2,122,25,95,76,111,97,100, - 101,114,66,97,115,105,99,115,46,108,111,97,100,95,109,111, - 100,117,108,101,78,41,8,114,125,0,0,0,114,124,0,0, - 0,114,126,0,0,0,114,127,0,0,0,114,182,0,0,0, - 114,212,0,0,0,114,217,0,0,0,114,220,0,0,0,114, - 5,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,114,208,0,0,0,0,3,0,0,115,10,0,0, - 0,8,2,4,3,8,8,8,3,8,8,114,208,0,0,0, - 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,3,0,0,0,64,0,0,0,115,74,0,0,0,101,0, - 90,1,100,0,90,2,100,1,100,2,132,0,90,3,100,3, - 100,4,132,0,90,4,100,5,100,6,132,0,90,5,100,7, - 100,8,132,0,90,6,100,9,100,10,132,0,90,7,100,11, - 100,12,156,1,100,13,100,14,132,2,90,8,100,15,100,16, - 132,0,90,9,100,17,83,0,41,18,218,12,83,111,117,114, - 99,101,76,111,97,100,101,114,99,2,0,0,0,0,0,0, - 0,0,0,0,0,2,0,0,0,1,0,0,0,67,0,0, - 0,115,8,0,0,0,116,0,130,1,100,1,83,0,41,2, - 122,165,79,112,116,105,111,110,97,108,32,109,101,116,104,111, - 100,32,116,104,97,116,32,114,101,116,117,114,110,115,32,116, - 104,101,32,109,111,100,105,102,105,99,97,116,105,111,110,32, - 116,105,109,101,32,40,97,110,32,105,110,116,41,32,102,111, - 114,32,116,104,101,10,32,32,32,32,32,32,32,32,115,112, - 101,99,105,102,105,101,100,32,112,97,116,104,32,40,97,32, - 
115,116,114,41,46,10,10,32,32,32,32,32,32,32,32,82, - 97,105,115,101,115,32,79,83,69,114,114,111,114,32,119,104, - 101,110,32,116,104,101,32,112,97,116,104,32,99,97,110,110, - 111,116,32,98,101,32,104,97,110,100,108,101,100,46,10,32, - 32,32,32,32,32,32,32,78,41,1,114,50,0,0,0,169, - 2,114,118,0,0,0,114,44,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,218,10,112,97,116,104, - 95,109,116,105,109,101,31,3,0,0,115,2,0,0,0,0, - 6,122,23,83,111,117,114,99,101,76,111,97,100,101,114,46, - 112,97,116,104,95,109,116,105,109,101,99,2,0,0,0,0, - 0,0,0,0,0,0,0,2,0,0,0,4,0,0,0,67, - 0,0,0,115,14,0,0,0,100,1,124,0,160,0,124,1, - 161,1,105,1,83,0,41,2,97,158,1,0,0,79,112,116, - 105,111,110,97,108,32,109,101,116,104,111,100,32,114,101,116, - 117,114,110,105,110,103,32,97,32,109,101,116,97,100,97,116, - 97,32,100,105,99,116,32,102,111,114,32,116,104,101,32,115, - 112,101,99,105,102,105,101,100,10,32,32,32,32,32,32,32, + 0,0,218,10,112,97,116,104,95,109,116,105,109,101,31,3, + 0,0,115,2,0,0,0,0,6,122,23,83,111,117,114,99, + 101,76,111,97,100,101,114,46,112,97,116,104,95,109,116,105, + 109,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,4,0,0,0,67,0,0,0,115,14,0,0,0, + 100,1,124,0,160,0,124,1,161,1,105,1,83,0,41,2, + 97,158,1,0,0,79,112,116,105,111,110,97,108,32,109,101, + 116,104,111,100,32,114,101,116,117,114,110,105,110,103,32,97, + 32,109,101,116,97,100,97,116,97,32,100,105,99,116,32,102, + 111,114,32,116,104,101,32,115,112,101,99,105,102,105,101,100, + 10,32,32,32,32,32,32,32,32,112,97,116,104,32,40,97, + 32,115,116,114,41,46,10,10,32,32,32,32,32,32,32,32, + 80,111,115,115,105,98,108,101,32,107,101,121,115,58,10,32, + 32,32,32,32,32,32,32,45,32,39,109,116,105,109,101,39, + 32,40,109,97,110,100,97,116,111,114,121,41,32,105,115,32, + 116,104,101,32,110,117,109,101,114,105,99,32,116,105,109,101, + 115,116,97,109,112,32,111,102,32,108,97,115,116,32,115,111, + 117,114,99,101,10,32,32,32,32,32,32,32,32,32,32,99, + 111,100,101,32,109,111,100,105,102,105,99,97,116,105,111,110, + 59,10,32,32,32,32,32,32,32,32,45,32,39,115,105,122, + 101,39,32,40,111,112,116,105,111,110,97,108,41,32,105,115, + 32,116,104,101,32,115,105,122,101,32,105,110,32,98,121,116, + 101,115,32,111,102,32,116,104,101,32,115,111,117,114,99,101, + 32,99,111,100,101,46,10,10,32,32,32,32,32,32,32,32, + 73,109,112,108,101,109,101,110,116,105,110,103,32,116,104,105, + 115,32,109,101,116,104,111,100,32,97,108,108,111,119,115,32, + 116,104,101,32,108,111,97,100,101,114,32,116,111,32,114,101, + 97,100,32,98,121,116,101,99,111,100,101,32,102,105,108,101, + 115,46,10,32,32,32,32,32,32,32,32,82,97,105,115,101, + 115,32,79,83,69,114,114,111,114,32,119,104,101,110,32,116, + 104,101,32,112,97,116,104,32,99,97,110,110,111,116,32,98, + 101,32,104,97,110,100,108,101,100,46,10,32,32,32,32,32, + 32,32,32,114,169,0,0,0,41,1,114,223,0,0,0,114, + 222,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, + 0,0,0,218,10,112,97,116,104,95,115,116,97,116,115,39, + 3,0,0,115,2,0,0,0,0,12,122,23,83,111,117,114, + 99,101,76,111,97,100,101,114,46,112,97,116,104,95,115,116, + 97,116,115,99,4,0,0,0,0,0,0,0,0,0,0,0, + 4,0,0,0,4,0,0,0,67,0,0,0,115,12,0,0, + 0,124,0,160,0,124,2,124,3,161,2,83,0,41,1,122, + 228,79,112,116,105,111,110,97,108,32,109,101,116,104,111,100, + 32,119,104,105,99,104,32,119,114,105,116,101,115,32,100,97, + 116,97,32,40,98,121,116,101,115,41,32,116,111,32,97,32, + 102,105,108,101,32,112,97,116,104,32,40,97,32,115,116,114, + 41,46,10,10,32,32,32,32,32,32,32,32,73,109,112,108, + 101,109,101,110,116,105,110,103,32,116,104,105,115,32,109,101, + 
116,104,111,100,32,97,108,108,111,119,115,32,102,111,114,32, + 116,104,101,32,119,114,105,116,105,110,103,32,111,102,32,98, + 121,116,101,99,111,100,101,32,102,105,108,101,115,46,10,10, + 32,32,32,32,32,32,32,32,84,104,101,32,115,111,117,114, + 99,101,32,112,97,116,104,32,105,115,32,110,101,101,100,101, + 100,32,105,110,32,111,114,100,101,114,32,116,111,32,99,111, + 114,114,101,99,116,108,121,32,116,114,97,110,115,102,101,114, + 32,112,101,114,109,105,115,115,105,111,110,115,10,32,32,32, + 32,32,32,32,32,41,1,218,8,115,101,116,95,100,97,116, + 97,41,4,114,118,0,0,0,114,107,0,0,0,90,10,99, + 97,99,104,101,95,112,97,116,104,114,26,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,15,95, + 99,97,99,104,101,95,98,121,116,101,99,111,100,101,53,3, + 0,0,115,2,0,0,0,0,8,122,28,83,111,117,114,99, + 101,76,111,97,100,101,114,46,95,99,97,99,104,101,95,98, + 121,116,101,99,111,100,101,99,3,0,0,0,0,0,0,0, + 0,0,0,0,3,0,0,0,1,0,0,0,67,0,0,0, + 115,4,0,0,0,100,1,83,0,41,2,122,150,79,112,116, + 105,111,110,97,108,32,109,101,116,104,111,100,32,119,104,105, + 99,104,32,119,114,105,116,101,115,32,100,97,116,97,32,40, + 98,121,116,101,115,41,32,116,111,32,97,32,102,105,108,101, 32,112,97,116,104,32,40,97,32,115,116,114,41,46,10,10, - 32,32,32,32,32,32,32,32,80,111,115,115,105,98,108,101, - 32,107,101,121,115,58,10,32,32,32,32,32,32,32,32,45, - 32,39,109,116,105,109,101,39,32,40,109,97,110,100,97,116, - 111,114,121,41,32,105,115,32,116,104,101,32,110,117,109,101, - 114,105,99,32,116,105,109,101,115,116,97,109,112,32,111,102, - 32,108,97,115,116,32,115,111,117,114,99,101,10,32,32,32, - 32,32,32,32,32,32,32,99,111,100,101,32,109,111,100,105, - 102,105,99,97,116,105,111,110,59,10,32,32,32,32,32,32, - 32,32,45,32,39,115,105,122,101,39,32,40,111,112,116,105, - 111,110,97,108,41,32,105,115,32,116,104,101,32,115,105,122, - 101,32,105,110,32,98,121,116,101,115,32,111,102,32,116,104, - 101,32,115,111,117,114,99,101,32,99,111,100,101,46,10,10, 32,32,32,32,32,32,32,32,73,109,112,108,101,109,101,110, 116,105,110,103,32,116,104,105,115,32,109,101,116,104,111,100, - 32,97,108,108,111,119,115,32,116,104,101,32,108,111,97,100, - 101,114,32,116,111,32,114,101,97,100,32,98,121,116,101,99, + 32,97,108,108,111,119,115,32,102,111,114,32,116,104,101,32, + 119,114,105,116,105,110,103,32,111,102,32,98,121,116,101,99, 111,100,101,32,102,105,108,101,115,46,10,32,32,32,32,32, - 32,32,32,82,97,105,115,101,115,32,79,83,69,114,114,111, - 114,32,119,104,101,110,32,116,104,101,32,112,97,116,104,32, - 99,97,110,110,111,116,32,98,101,32,104,97,110,100,108,101, - 100,46,10,32,32,32,32,32,32,32,32,114,169,0,0,0, - 41,1,114,223,0,0,0,114,222,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,218,10,112,97,116, - 104,95,115,116,97,116,115,39,3,0,0,115,2,0,0,0, - 0,12,122,23,83,111,117,114,99,101,76,111,97,100,101,114, - 46,112,97,116,104,95,115,116,97,116,115,99,4,0,0,0, - 0,0,0,0,0,0,0,0,4,0,0,0,4,0,0,0, - 67,0,0,0,115,12,0,0,0,124,0,160,0,124,2,124, - 3,161,2,83,0,41,1,122,228,79,112,116,105,111,110,97, - 108,32,109,101,116,104,111,100,32,119,104,105,99,104,32,119, - 114,105,116,101,115,32,100,97,116,97,32,40,98,121,116,101, - 115,41,32,116,111,32,97,32,102,105,108,101,32,112,97,116, - 104,32,40,97,32,115,116,114,41,46,10,10,32,32,32,32, - 32,32,32,32,73,109,112,108,101,109,101,110,116,105,110,103, - 32,116,104,105,115,32,109,101,116,104,111,100,32,97,108,108, - 111,119,115,32,102,111,114,32,116,104,101,32,119,114,105,116, - 105,110,103,32,111,102,32,98,121,116,101,99,111,100,101,32, - 102,105,108,101,115,46,10,10,32,32,32,32,32,32,32,32, - 
84,104,101,32,115,111,117,114,99,101,32,112,97,116,104,32, - 105,115,32,110,101,101,100,101,100,32,105,110,32,111,114,100, - 101,114,32,116,111,32,99,111,114,114,101,99,116,108,121,32, - 116,114,97,110,115,102,101,114,32,112,101,114,109,105,115,115, - 105,111,110,115,10,32,32,32,32,32,32,32,32,41,1,218, - 8,115,101,116,95,100,97,116,97,41,4,114,118,0,0,0, - 114,107,0,0,0,90,10,99,97,99,104,101,95,112,97,116, - 104,114,26,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,15,95,99,97,99,104,101,95,98,121, - 116,101,99,111,100,101,53,3,0,0,115,2,0,0,0,0, - 8,122,28,83,111,117,114,99,101,76,111,97,100,101,114,46, - 95,99,97,99,104,101,95,98,121,116,101,99,111,100,101,99, - 3,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0, - 1,0,0,0,67,0,0,0,115,4,0,0,0,100,1,83, - 0,41,2,122,150,79,112,116,105,111,110,97,108,32,109,101, - 116,104,111,100,32,119,104,105,99,104,32,119,114,105,116,101, - 115,32,100,97,116,97,32,40,98,121,116,101,115,41,32,116, - 111,32,97,32,102,105,108,101,32,112,97,116,104,32,40,97, - 32,115,116,114,41,46,10,10,32,32,32,32,32,32,32,32, - 73,109,112,108,101,109,101,110,116,105,110,103,32,116,104,105, - 115,32,109,101,116,104,111,100,32,97,108,108,111,119,115,32, - 102,111,114,32,116,104,101,32,119,114,105,116,105,110,103,32, - 111,102,32,98,121,116,101,99,111,100,101,32,102,105,108,101, - 115,46,10,32,32,32,32,32,32,32,32,78,114,5,0,0, - 0,41,3,114,118,0,0,0,114,44,0,0,0,114,26,0, - 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, - 0,114,225,0,0,0,63,3,0,0,115,2,0,0,0,0, - 1,122,21,83,111,117,114,99,101,76,111,97,100,101,114,46, - 115,101,116,95,100,97,116,97,99,2,0,0,0,0,0,0, - 0,0,0,0,0,5,0,0,0,10,0,0,0,67,0,0, - 0,115,84,0,0,0,124,0,160,0,124,1,161,1,125,2, - 122,14,124,0,160,1,124,2,161,1,125,3,87,0,110,50, - 4,0,116,2,121,74,1,0,125,4,1,0,122,26,116,3, - 100,1,124,1,100,2,141,2,124,4,130,2,87,0,89,0, - 100,3,125,4,126,4,110,10,100,3,125,4,126,4,48,0, - 48,0,116,4,124,3,131,1,83,0,41,4,122,52,67,111, - 110,99,114,101,116,101,32,105,109,112,108,101,109,101,110,116, - 97,116,105,111,110,32,111,102,32,73,110,115,112,101,99,116, - 76,111,97,100,101,114,46,103,101,116,95,115,111,117,114,99, - 101,46,122,39,115,111,117,114,99,101,32,110,111,116,32,97, - 118,97,105,108,97,98,108,101,32,116,104,114,111,117,103,104, - 32,103,101,116,95,100,97,116,97,40,41,114,115,0,0,0, - 78,41,5,114,179,0,0,0,218,8,103,101,116,95,100,97, - 116,97,114,50,0,0,0,114,117,0,0,0,114,176,0,0, - 0,41,5,114,118,0,0,0,114,139,0,0,0,114,44,0, - 0,0,114,174,0,0,0,218,3,101,120,99,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,218,10,103,101,116, - 95,115,111,117,114,99,101,70,3,0,0,115,20,0,0,0, - 0,2,10,1,2,1,14,1,14,1,4,1,2,255,4,1, - 2,255,24,2,122,23,83,111,117,114,99,101,76,111,97,100, - 101,114,46,103,101,116,95,115,111,117,114,99,101,114,104,0, - 0,0,41,1,218,9,95,111,112,116,105,109,105,122,101,99, - 3,0,0,0,0,0,0,0,1,0,0,0,4,0,0,0, - 8,0,0,0,67,0,0,0,115,22,0,0,0,116,0,106, - 1,116,2,124,1,124,2,100,1,100,2,124,3,100,3,141, - 6,83,0,41,4,122,130,82,101,116,117,114,110,32,116,104, - 101,32,99,111,100,101,32,111,98,106,101,99,116,32,99,111, - 109,112,105,108,101,100,32,102,114,111,109,32,115,111,117,114, - 99,101,46,10,10,32,32,32,32,32,32,32,32,84,104,101, - 32,39,100,97,116,97,39,32,97,114,103,117,109,101,110,116, - 32,99,97,110,32,98,101,32,97,110,121,32,111,98,106,101, - 99,116,32,116,121,112,101,32,116,104,97,116,32,99,111,109, - 112,105,108,101,40,41,32,115,117,112,112,111,114,116,115,46, - 10,32,32,32,32,32,32,32,32,114,215,0,0,0,84,41, - 2,218,12,100,111,110,116,95,105,110,104,101,114,105,116,114, - 
83,0,0,0,41,3,114,134,0,0,0,114,214,0,0,0, - 218,7,99,111,109,112,105,108,101,41,4,114,118,0,0,0, - 114,26,0,0,0,114,44,0,0,0,114,230,0,0,0,114, - 5,0,0,0,114,5,0,0,0,114,8,0,0,0,218,14, - 115,111,117,114,99,101,95,116,111,95,99,111,100,101,80,3, - 0,0,115,6,0,0,0,0,5,12,1,4,255,122,27,83, - 111,117,114,99,101,76,111,97,100,101,114,46,115,111,117,114, - 99,101,95,116,111,95,99,111,100,101,99,2,0,0,0,0, - 0,0,0,0,0,0,0,15,0,0,0,9,0,0,0,67, - 0,0,0,115,24,2,0,0,124,0,160,0,124,1,161,1, - 125,2,100,1,125,3,100,1,125,4,100,1,125,5,100,2, - 125,6,100,3,125,7,122,12,116,1,124,2,131,1,125,8, - 87,0,110,24,4,0,116,2,121,66,1,0,1,0,1,0, - 100,1,125,8,89,0,144,1,110,42,48,0,122,14,124,0, - 160,3,124,2,161,1,125,9,87,0,110,20,4,0,116,4, - 121,102,1,0,1,0,1,0,89,0,144,1,110,6,48,0, - 116,5,124,9,100,4,25,0,131,1,125,3,122,14,124,0, - 160,6,124,8,161,1,125,10,87,0,110,18,4,0,116,4, - 121,148,1,0,1,0,1,0,89,0,110,216,48,0,124,1, - 124,8,100,5,156,2,125,11,122,148,116,7,124,10,124,1, - 124,11,131,3,125,12,116,8,124,10,131,1,100,6,100,1, - 133,2,25,0,125,13,124,12,100,7,64,0,100,8,107,3, - 125,6,124,6,144,1,114,30,124,12,100,9,64,0,100,8, - 107,3,125,7,116,9,106,10,100,10,107,3,144,1,114,50, - 124,7,115,248,116,9,106,10,100,11,107,2,144,1,114,50, - 124,0,160,6,124,2,161,1,125,4,116,9,160,11,116,12, - 124,4,161,2,125,5,116,13,124,10,124,5,124,1,124,11, - 131,4,1,0,110,20,116,14,124,10,124,3,124,9,100,12, - 25,0,124,1,124,11,131,5,1,0,87,0,110,24,4,0, - 116,15,116,16,102,2,144,1,121,76,1,0,1,0,1,0, - 89,0,110,32,48,0,116,17,160,18,100,13,124,8,124,2, - 161,3,1,0,116,19,124,13,124,1,124,8,124,2,100,14, - 141,4,83,0,124,4,100,1,117,0,144,1,114,128,124,0, - 160,6,124,2,161,1,125,4,124,0,160,20,124,4,124,2, - 161,2,125,14,116,17,160,18,100,15,124,2,161,2,1,0, - 116,21,106,22,144,2,115,20,124,8,100,1,117,1,144,2, - 114,20,124,3,100,1,117,1,144,2,114,20,124,6,144,1, - 114,220,124,5,100,1,117,0,144,1,114,206,116,9,160,11, - 124,4,161,1,125,5,116,23,124,14,124,5,124,7,131,3, - 125,10,110,16,116,24,124,14,124,3,116,25,124,4,131,1, - 131,3,125,10,122,18,124,0,160,26,124,2,124,8,124,10, - 161,3,1,0,87,0,110,20,4,0,116,2,144,2,121,18, - 1,0,1,0,1,0,89,0,110,2,48,0,124,14,83,0, - 41,16,122,190,67,111,110,99,114,101,116,101,32,105,109,112, - 108,101,109,101,110,116,97,116,105,111,110,32,111,102,32,73, - 110,115,112,101,99,116,76,111,97,100,101,114,46,103,101,116, - 95,99,111,100,101,46,10,10,32,32,32,32,32,32,32,32, - 82,101,97,100,105,110,103,32,111,102,32,98,121,116,101,99, - 111,100,101,32,114,101,113,117,105,114,101,115,32,112,97,116, - 104,95,115,116,97,116,115,32,116,111,32,98,101,32,105,109, - 112,108,101,109,101,110,116,101,100,46,32,84,111,32,119,114, - 105,116,101,10,32,32,32,32,32,32,32,32,98,121,116,101, - 99,111,100,101,44,32,115,101,116,95,100,97,116,97,32,109, - 117,115,116,32,97,108,115,111,32,98,101,32,105,109,112,108, - 101,109,101,110,116,101,100,46,10,10,32,32,32,32,32,32, - 32,32,78,70,84,114,169,0,0,0,114,159,0,0,0,114, - 145,0,0,0,114,39,0,0,0,114,73,0,0,0,114,28, - 0,0,0,90,5,110,101,118,101,114,90,6,97,108,119,97, - 121,115,218,4,115,105,122,101,122,13,123,125,32,109,97,116, - 99,104,101,115,32,123,125,41,3,114,116,0,0,0,114,106, - 0,0,0,114,107,0,0,0,122,19,99,111,100,101,32,111, - 98,106,101,99,116,32,102,114,111,109,32,123,125,41,27,114, - 179,0,0,0,114,97,0,0,0,114,82,0,0,0,114,224, - 0,0,0,114,50,0,0,0,114,18,0,0,0,114,227,0, - 0,0,114,152,0,0,0,218,10,109,101,109,111,114,121,118, - 105,101,119,114,163,0,0,0,90,21,99,104,101,99,107,95, - 
104,97,115,104,95,98,97,115,101,100,95,112,121,99,115,114, - 157,0,0,0,218,17,95,82,65,87,95,77,65,71,73,67, - 95,78,85,77,66,69,82,114,158,0,0,0,114,156,0,0, - 0,114,117,0,0,0,114,150,0,0,0,114,134,0,0,0, - 114,149,0,0,0,114,165,0,0,0,114,233,0,0,0,114, - 1,0,0,0,218,19,100,111,110,116,95,119,114,105,116,101, - 95,98,121,116,101,99,111,100,101,114,171,0,0,0,114,170, - 0,0,0,114,23,0,0,0,114,226,0,0,0,41,15,114, - 118,0,0,0,114,139,0,0,0,114,107,0,0,0,114,154, - 0,0,0,114,174,0,0,0,114,157,0,0,0,90,10,104, - 97,115,104,95,98,97,115,101,100,90,12,99,104,101,99,107, - 95,115,111,117,114,99,101,114,106,0,0,0,218,2,115,116, - 114,26,0,0,0,114,151,0,0,0,114,2,0,0,0,90, - 10,98,121,116,101,115,95,100,97,116,97,90,11,99,111,100, - 101,95,111,98,106,101,99,116,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,213,0,0,0,88,3,0,0, - 115,152,0,0,0,0,7,10,1,4,1,4,1,4,1,4, - 1,4,1,2,1,12,1,12,1,12,2,2,1,14,1,12, - 1,8,2,12,1,2,1,14,1,12,1,6,3,2,1,2, - 254,6,4,2,1,12,1,16,1,12,1,6,1,12,1,12, - 1,2,255,2,2,8,254,4,3,10,1,4,1,2,1,2, - 254,4,4,8,1,2,255,6,3,2,1,2,1,2,1,6, - 1,2,1,2,251,8,7,18,1,6,2,8,1,2,255,4, - 2,6,1,2,1,2,254,6,3,10,1,10,1,12,1,12, - 1,18,1,6,255,4,2,6,1,10,1,10,1,14,2,6, - 1,6,255,4,2,2,1,18,1,14,1,6,1,122,21,83, - 111,117,114,99,101,76,111,97,100,101,114,46,103,101,116,95, - 99,111,100,101,78,41,10,114,125,0,0,0,114,124,0,0, - 0,114,126,0,0,0,114,223,0,0,0,114,224,0,0,0, - 114,226,0,0,0,114,225,0,0,0,114,229,0,0,0,114, - 233,0,0,0,114,213,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,221,0, - 0,0,29,3,0,0,115,14,0,0,0,8,2,8,8,8, - 14,8,10,8,7,8,10,14,8,114,221,0,0,0,99,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,4, - 0,0,0,0,0,0,0,115,92,0,0,0,101,0,90,1, - 100,0,90,2,100,1,90,3,100,2,100,3,132,0,90,4, - 100,4,100,5,132,0,90,5,100,6,100,7,132,0,90,6, - 101,7,135,0,102,1,100,8,100,9,132,8,131,1,90,8, - 101,7,100,10,100,11,132,0,131,1,90,9,100,12,100,13, - 132,0,90,10,101,7,100,14,100,15,132,0,131,1,90,11, - 135,0,4,0,90,12,83,0,41,16,218,10,70,105,108,101, - 76,111,97,100,101,114,122,103,66,97,115,101,32,102,105,108, - 101,32,108,111,97,100,101,114,32,99,108,97,115,115,32,119, - 104,105,99,104,32,105,109,112,108,101,109,101,110,116,115,32, - 116,104,101,32,108,111,97,100,101,114,32,112,114,111,116,111, - 99,111,108,32,109,101,116,104,111,100,115,32,116,104,97,116, - 10,32,32,32,32,114,101,113,117,105,114,101,32,102,105,108, - 101,32,115,121,115,116,101,109,32,117,115,97,103,101,46,99, - 3,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0, - 2,0,0,0,67,0,0,0,115,16,0,0,0,124,1,124, - 0,95,0,124,2,124,0,95,1,100,1,83,0,41,2,122, - 75,67,97,99,104,101,32,116,104,101,32,109,111,100,117,108, - 101,32,110,97,109,101,32,97,110,100,32,116,104,101,32,112, - 97,116,104,32,116,111,32,116,104,101,32,102,105,108,101,32, - 102,111,117,110,100,32,98,121,32,116,104,101,10,32,32,32, - 32,32,32,32,32,102,105,110,100,101,114,46,78,114,159,0, - 0,0,41,3,114,118,0,0,0,114,139,0,0,0,114,44, - 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,114,209,0,0,0,178,3,0,0,115,4,0,0,0, - 0,3,6,1,122,19,70,105,108,101,76,111,97,100,101,114, - 46,95,95,105,110,105,116,95,95,99,2,0,0,0,0,0, - 0,0,0,0,0,0,2,0,0,0,2,0,0,0,67,0, - 0,0,115,24,0,0,0,124,0,106,0,124,1,106,0,107, - 2,111,22,124,0,106,1,124,1,106,1,107,2,83,0,114, - 109,0,0,0,169,2,218,9,95,95,99,108,97,115,115,95, - 95,114,131,0,0,0,169,2,114,118,0,0,0,90,5,111, - 116,104,101,114,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,6,95,95,101,113,95,95,184,3,0,0,115, - 6,0,0,0,0,1,12,1,10,255,122,17,70,105,108,101, - 76,111,97,100,101,114,46,95,95,101,113,95,95,99,1,0, - 
0,0,0,0,0,0,0,0,0,0,1,0,0,0,3,0, - 0,0,67,0,0,0,115,20,0,0,0,116,0,124,0,106, - 1,131,1,116,0,124,0,106,2,131,1,65,0,83,0,114, - 109,0,0,0,169,3,218,4,104,97,115,104,114,116,0,0, - 0,114,44,0,0,0,169,1,114,118,0,0,0,114,5,0, - 0,0,114,5,0,0,0,114,8,0,0,0,218,8,95,95, - 104,97,115,104,95,95,188,3,0,0,115,2,0,0,0,0, - 1,122,19,70,105,108,101,76,111,97,100,101,114,46,95,95, - 104,97,115,104,95,95,99,2,0,0,0,0,0,0,0,0, - 0,0,0,2,0,0,0,3,0,0,0,3,0,0,0,115, - 16,0,0,0,116,0,116,1,124,0,131,2,160,2,124,1, - 161,1,83,0,41,1,122,100,76,111,97,100,32,97,32,109, - 111,100,117,108,101,32,102,114,111,109,32,97,32,102,105,108, - 101,46,10,10,32,32,32,32,32,32,32,32,84,104,105,115, - 32,109,101,116,104,111,100,32,105,115,32,100,101,112,114,101, - 99,97,116,101,100,46,32,32,85,115,101,32,101,120,101,99, - 95,109,111,100,117,108,101,40,41,32,105,110,115,116,101,97, - 100,46,10,10,32,32,32,32,32,32,32,32,41,3,218,5, - 115,117,112,101,114,114,239,0,0,0,114,220,0,0,0,114, - 219,0,0,0,169,1,114,241,0,0,0,114,5,0,0,0, - 114,8,0,0,0,114,220,0,0,0,191,3,0,0,115,2, - 0,0,0,0,10,122,22,70,105,108,101,76,111,97,100,101, - 114,46,108,111,97,100,95,109,111,100,117,108,101,99,2,0, - 0,0,0,0,0,0,0,0,0,0,2,0,0,0,1,0, - 0,0,67,0,0,0,115,6,0,0,0,124,0,106,0,83, - 0,169,1,122,58,82,101,116,117,114,110,32,116,104,101,32, - 112,97,116,104,32,116,111,32,116,104,101,32,115,111,117,114, - 99,101,32,102,105,108,101,32,97,115,32,102,111,117,110,100, - 32,98,121,32,116,104,101,32,102,105,110,100,101,114,46,114, - 48,0,0,0,114,219,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,179,0,0,0,203,3,0, - 0,115,2,0,0,0,0,3,122,23,70,105,108,101,76,111, - 97,100,101,114,46,103,101,116,95,102,105,108,101,110,97,109, - 101,99,2,0,0,0,0,0,0,0,0,0,0,0,3,0, - 0,0,8,0,0,0,67,0,0,0,115,126,0,0,0,116, - 0,124,0,116,1,116,2,102,2,131,2,114,70,116,3,160, - 4,116,5,124,1,131,1,161,1,143,24,125,2,124,2,160, - 6,161,0,87,0,2,0,100,1,4,0,4,0,131,3,1, - 0,83,0,49,0,115,58,48,0,1,0,1,0,1,0,89, - 0,1,0,110,52,116,3,160,7,124,1,100,2,161,2,143, - 24,125,2,124,2,160,6,161,0,87,0,2,0,100,1,4, - 0,4,0,131,3,1,0,83,0,49,0,115,112,48,0,1, - 0,1,0,1,0,89,0,1,0,100,1,83,0,41,3,122, - 39,82,101,116,117,114,110,32,116,104,101,32,100,97,116,97, - 32,102,114,111,109,32,112,97,116,104,32,97,115,32,114,97, - 119,32,98,121,116,101,115,46,78,218,1,114,41,8,114,161, - 0,0,0,114,221,0,0,0,218,19,69,120,116,101,110,115, - 105,111,110,70,105,108,101,76,111,97,100,101,114,114,64,0, - 0,0,90,9,111,112,101,110,95,99,111,100,101,114,84,0, - 0,0,90,4,114,101,97,100,114,65,0,0,0,41,3,114, - 118,0,0,0,114,44,0,0,0,114,68,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,227,0, - 0,0,208,3,0,0,115,10,0,0,0,0,2,14,1,16, - 1,40,2,14,1,122,19,70,105,108,101,76,111,97,100,101, - 114,46,103,101,116,95,100,97,116,97,99,2,0,0,0,0, - 0,0,0,0,0,0,0,3,0,0,0,2,0,0,0,67, - 0,0,0,115,20,0,0,0,100,1,100,2,108,0,109,1, - 125,2,1,0,124,2,124,0,131,1,83,0,41,3,78,114, - 73,0,0,0,41,1,218,10,70,105,108,101,82,101,97,100, - 101,114,41,2,90,17,105,109,112,111,114,116,108,105,98,46, - 114,101,97,100,101,114,115,114,253,0,0,0,41,3,114,118, - 0,0,0,114,216,0,0,0,114,253,0,0,0,114,5,0, - 0,0,114,5,0,0,0,114,8,0,0,0,218,19,103,101, - 116,95,114,101,115,111,117,114,99,101,95,114,101,97,100,101, - 114,217,3,0,0,115,4,0,0,0,0,2,12,1,122,30, - 70,105,108,101,76,111,97,100,101,114,46,103,101,116,95,114, - 101,115,111,117,114,99,101,95,114,101,97,100,101,114,41,13, - 114,125,0,0,0,114,124,0,0,0,114,126,0,0,0,114, - 127,0,0,0,114,209,0,0,0,114,243,0,0,0,114,247, - 0,0,0,114,136,0,0,0,114,220,0,0,0,114,179,0, - 
0,0,114,227,0,0,0,114,254,0,0,0,90,13,95,95, - 99,108,97,115,115,99,101,108,108,95,95,114,5,0,0,0, - 114,5,0,0,0,114,249,0,0,0,114,8,0,0,0,114, - 239,0,0,0,173,3,0,0,115,22,0,0,0,8,2,4, - 3,8,6,8,4,8,3,2,1,14,11,2,1,10,4,8, - 9,2,1,114,239,0,0,0,99,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, - 0,115,46,0,0,0,101,0,90,1,100,0,90,2,100,1, - 90,3,100,2,100,3,132,0,90,4,100,4,100,5,132,0, - 90,5,100,6,100,7,156,1,100,8,100,9,132,2,90,6, - 100,10,83,0,41,11,218,16,83,111,117,114,99,101,70,105, - 108,101,76,111,97,100,101,114,122,62,67,111,110,99,114,101, - 116,101,32,105,109,112,108,101,109,101,110,116,97,116,105,111, - 110,32,111,102,32,83,111,117,114,99,101,76,111,97,100,101, - 114,32,117,115,105,110,103,32,116,104,101,32,102,105,108,101, - 32,115,121,115,116,101,109,46,99,2,0,0,0,0,0,0, - 0,0,0,0,0,3,0,0,0,3,0,0,0,67,0,0, - 0,115,22,0,0,0,116,0,124,1,131,1,125,2,124,2, - 106,1,124,2,106,2,100,1,156,2,83,0,41,2,122,33, - 82,101,116,117,114,110,32,116,104,101,32,109,101,116,97,100, - 97,116,97,32,102,111,114,32,116,104,101,32,112,97,116,104, - 46,41,2,114,169,0,0,0,114,234,0,0,0,41,3,114, - 49,0,0,0,218,8,115,116,95,109,116,105,109,101,90,7, - 115,116,95,115,105,122,101,41,3,114,118,0,0,0,114,44, - 0,0,0,114,238,0,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,224,0,0,0,227,3,0,0, - 115,4,0,0,0,0,2,8,1,122,27,83,111,117,114,99, - 101,70,105,108,101,76,111,97,100,101,114,46,112,97,116,104, - 95,115,116,97,116,115,99,4,0,0,0,0,0,0,0,0, - 0,0,0,5,0,0,0,5,0,0,0,67,0,0,0,115, - 24,0,0,0,116,0,124,1,131,1,125,4,124,0,106,1, - 124,2,124,3,124,4,100,1,141,3,83,0,41,2,78,169, - 1,218,5,95,109,111,100,101,41,2,114,114,0,0,0,114, - 225,0,0,0,41,5,114,118,0,0,0,114,107,0,0,0, - 114,106,0,0,0,114,26,0,0,0,114,52,0,0,0,114, - 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,226, - 0,0,0,232,3,0,0,115,4,0,0,0,0,2,8,1, - 122,32,83,111,117,114,99,101,70,105,108,101,76,111,97,100, - 101,114,46,95,99,97,99,104,101,95,98,121,116,101,99,111, - 100,101,114,60,0,0,0,114,1,1,0,0,99,3,0,0, - 0,0,0,0,0,1,0,0,0,9,0,0,0,11,0,0, - 0,67,0,0,0,115,252,0,0,0,116,0,124,1,131,1, - 92,2,125,4,125,5,103,0,125,6,124,4,114,52,116,1, - 124,4,131,1,115,52,116,0,124,4,131,1,92,2,125,4, - 125,7,124,6,160,2,124,7,161,1,1,0,113,16,116,3, - 124,6,131,1,68,0,93,104,125,7,116,4,124,4,124,7, - 131,2,125,4,122,14,116,5,160,6,124,4,161,1,1,0, - 87,0,113,60,4,0,116,7,121,110,1,0,1,0,1,0, - 89,0,113,60,89,0,113,60,4,0,116,8,121,162,1,0, - 125,8,1,0,122,30,116,9,160,10,100,1,124,4,124,8, - 161,3,1,0,87,0,89,0,100,2,125,8,126,8,1,0, - 100,2,83,0,100,2,125,8,126,8,48,0,48,0,113,60, - 122,28,116,11,124,1,124,2,124,3,131,3,1,0,116,9, - 160,10,100,3,124,1,161,2,1,0,87,0,110,52,4,0, - 116,8,144,0,121,246,1,0,125,8,1,0,122,26,116,9, - 160,10,100,1,124,1,124,8,161,3,1,0,87,0,89,0, - 100,2,125,8,126,8,110,10,100,2,125,8,126,8,48,0, - 48,0,100,2,83,0,41,4,122,27,87,114,105,116,101,32, - 98,121,116,101,115,32,100,97,116,97,32,116,111,32,97,32, - 102,105,108,101,46,122,27,99,111,117,108,100,32,110,111,116, - 32,99,114,101,97,116,101,32,123,33,114,125,58,32,123,33, - 114,125,78,122,12,99,114,101,97,116,101,100,32,123,33,114, - 125,41,12,114,47,0,0,0,114,56,0,0,0,114,186,0, - 0,0,114,42,0,0,0,114,38,0,0,0,114,4,0,0, - 0,90,5,109,107,100,105,114,218,15,70,105,108,101,69,120, - 105,115,116,115,69,114,114,111,114,114,50,0,0,0,114,134, - 0,0,0,114,149,0,0,0,114,69,0,0,0,41,9,114, - 118,0,0,0,114,44,0,0,0,114,26,0,0,0,114,2, - 1,0,0,218,6,112,97,114,101,110,116,114,96,0,0,0, - 114,37,0,0,0,114,33,0,0,0,114,228,0,0,0,114, - 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,225, - 
0,0,0,237,3,0,0,115,46,0,0,0,0,2,12,1, - 4,2,12,1,12,1,12,2,12,1,10,1,2,1,14,1, - 12,2,8,1,14,3,6,1,4,255,4,2,28,1,2,1, - 12,1,16,1,16,2,8,1,2,255,122,25,83,111,117,114, - 99,101,70,105,108,101,76,111,97,100,101,114,46,115,101,116, - 95,100,97,116,97,78,41,7,114,125,0,0,0,114,124,0, - 0,0,114,126,0,0,0,114,127,0,0,0,114,224,0,0, - 0,114,226,0,0,0,114,225,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 255,0,0,0,223,3,0,0,115,8,0,0,0,8,2,4, - 2,8,5,8,5,114,255,0,0,0,99,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,64, - 0,0,0,115,32,0,0,0,101,0,90,1,100,0,90,2, - 100,1,90,3,100,2,100,3,132,0,90,4,100,4,100,5, - 132,0,90,5,100,6,83,0,41,7,218,20,83,111,117,114, - 99,101,108,101,115,115,70,105,108,101,76,111,97,100,101,114, - 122,45,76,111,97,100,101,114,32,119,104,105,99,104,32,104, - 97,110,100,108,101,115,32,115,111,117,114,99,101,108,101,115, - 115,32,102,105,108,101,32,105,109,112,111,114,116,115,46,99, - 2,0,0,0,0,0,0,0,0,0,0,0,5,0,0,0, - 5,0,0,0,67,0,0,0,115,68,0,0,0,124,0,160, - 0,124,1,161,1,125,2,124,0,160,1,124,2,161,1,125, - 3,124,1,124,2,100,1,156,2,125,4,116,2,124,3,124, - 1,124,4,131,3,1,0,116,3,116,4,124,3,131,1,100, - 2,100,0,133,2,25,0,124,1,124,2,100,3,141,3,83, - 0,41,4,78,114,159,0,0,0,114,145,0,0,0,41,2, - 114,116,0,0,0,114,106,0,0,0,41,5,114,179,0,0, - 0,114,227,0,0,0,114,152,0,0,0,114,165,0,0,0, - 114,235,0,0,0,41,5,114,118,0,0,0,114,139,0,0, - 0,114,44,0,0,0,114,26,0,0,0,114,151,0,0,0, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 213,0,0,0,16,4,0,0,115,22,0,0,0,0,1,10, - 1,10,4,2,1,2,254,6,4,12,1,2,1,14,1,2, - 1,2,253,122,29,83,111,117,114,99,101,108,101,115,115,70, - 105,108,101,76,111,97,100,101,114,46,103,101,116,95,99,111, - 100,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, - 100,1,83,0,41,2,122,39,82,101,116,117,114,110,32,78, - 111,110,101,32,97,115,32,116,104,101,114,101,32,105,115,32, - 110,111,32,115,111,117,114,99,101,32,99,111,100,101,46,78, - 114,5,0,0,0,114,219,0,0,0,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,114,229,0,0,0,32,4, - 0,0,115,2,0,0,0,0,2,122,31,83,111,117,114,99, - 101,108,101,115,115,70,105,108,101,76,111,97,100,101,114,46, - 103,101,116,95,115,111,117,114,99,101,78,41,6,114,125,0, - 0,0,114,124,0,0,0,114,126,0,0,0,114,127,0,0, - 0,114,213,0,0,0,114,229,0,0,0,114,5,0,0,0, + 32,32,32,78,114,5,0,0,0,41,3,114,118,0,0,0, + 114,44,0,0,0,114,26,0,0,0,114,5,0,0,0,114, + 5,0,0,0,114,8,0,0,0,114,225,0,0,0,63,3, + 0,0,115,2,0,0,0,0,1,122,21,83,111,117,114,99, + 101,76,111,97,100,101,114,46,115,101,116,95,100,97,116,97, + 99,2,0,0,0,0,0,0,0,0,0,0,0,5,0,0, + 0,10,0,0,0,67,0,0,0,115,84,0,0,0,124,0, + 160,0,124,1,161,1,125,2,122,14,124,0,160,1,124,2, + 161,1,125,3,87,0,110,50,4,0,116,2,121,74,1,0, + 125,4,1,0,122,26,116,3,100,1,124,1,100,2,141,2, + 124,4,130,2,87,0,89,0,100,3,125,4,126,4,110,10, + 100,3,125,4,126,4,48,0,48,0,116,4,124,3,131,1, + 83,0,41,4,122,52,67,111,110,99,114,101,116,101,32,105, + 109,112,108,101,109,101,110,116,97,116,105,111,110,32,111,102, + 32,73,110,115,112,101,99,116,76,111,97,100,101,114,46,103, + 101,116,95,115,111,117,114,99,101,46,122,39,115,111,117,114, + 99,101,32,110,111,116,32,97,118,97,105,108,97,98,108,101, + 32,116,104,114,111,117,103,104,32,103,101,116,95,100,97,116, + 97,40,41,114,115,0,0,0,78,41,5,114,179,0,0,0, + 218,8,103,101,116,95,100,97,116,97,114,50,0,0,0,114, + 117,0,0,0,114,176,0,0,0,41,5,114,118,0,0,0, + 114,139,0,0,0,114,44,0,0,0,114,174,0,0,0,218, + 3,101,120,99,114,5,0,0,0,114,5,0,0,0,114,8, + 0,0,0,218,10,103,101,116,95,115,111,117,114,99,101,70, + 
3,0,0,115,20,0,0,0,0,2,10,1,2,1,14,1, + 14,1,4,1,2,255,4,1,2,255,24,2,122,23,83,111, + 117,114,99,101,76,111,97,100,101,114,46,103,101,116,95,115, + 111,117,114,99,101,114,104,0,0,0,41,1,218,9,95,111, + 112,116,105,109,105,122,101,99,3,0,0,0,0,0,0,0, + 1,0,0,0,4,0,0,0,8,0,0,0,67,0,0,0, + 115,22,0,0,0,116,0,106,1,116,2,124,1,124,2,100, + 1,100,2,124,3,100,3,141,6,83,0,41,4,122,130,82, + 101,116,117,114,110,32,116,104,101,32,99,111,100,101,32,111, + 98,106,101,99,116,32,99,111,109,112,105,108,101,100,32,102, + 114,111,109,32,115,111,117,114,99,101,46,10,10,32,32,32, + 32,32,32,32,32,84,104,101,32,39,100,97,116,97,39,32, + 97,114,103,117,109,101,110,116,32,99,97,110,32,98,101,32, + 97,110,121,32,111,98,106,101,99,116,32,116,121,112,101,32, + 116,104,97,116,32,99,111,109,112,105,108,101,40,41,32,115, + 117,112,112,111,114,116,115,46,10,32,32,32,32,32,32,32, + 32,114,215,0,0,0,84,41,2,218,12,100,111,110,116,95, + 105,110,104,101,114,105,116,114,83,0,0,0,41,3,114,134, + 0,0,0,114,214,0,0,0,218,7,99,111,109,112,105,108, + 101,41,4,114,118,0,0,0,114,26,0,0,0,114,44,0, + 0,0,114,230,0,0,0,114,5,0,0,0,114,5,0,0, + 0,114,8,0,0,0,218,14,115,111,117,114,99,101,95,116, + 111,95,99,111,100,101,80,3,0,0,115,6,0,0,0,0, + 5,12,1,4,255,122,27,83,111,117,114,99,101,76,111,97, + 100,101,114,46,115,111,117,114,99,101,95,116,111,95,99,111, + 100,101,99,2,0,0,0,0,0,0,0,0,0,0,0,15, + 0,0,0,9,0,0,0,67,0,0,0,115,24,2,0,0, + 124,0,160,0,124,1,161,1,125,2,100,1,125,3,100,1, + 125,4,100,1,125,5,100,2,125,6,100,3,125,7,122,12, + 116,1,124,2,131,1,125,8,87,0,110,24,4,0,116,2, + 121,66,1,0,1,0,1,0,100,1,125,8,89,0,144,1, + 110,42,48,0,122,14,124,0,160,3,124,2,161,1,125,9, + 87,0,110,20,4,0,116,4,121,102,1,0,1,0,1,0, + 89,0,144,1,110,6,48,0,116,5,124,9,100,4,25,0, + 131,1,125,3,122,14,124,0,160,6,124,8,161,1,125,10, + 87,0,110,18,4,0,116,4,121,148,1,0,1,0,1,0, + 89,0,110,216,48,0,124,1,124,8,100,5,156,2,125,11, + 122,148,116,7,124,10,124,1,124,11,131,3,125,12,116,8, + 124,10,131,1,100,6,100,1,133,2,25,0,125,13,124,12, + 100,7,64,0,100,8,107,3,125,6,124,6,144,1,114,30, + 124,12,100,9,64,0,100,8,107,3,125,7,116,9,106,10, + 100,10,107,3,144,1,114,50,124,7,115,248,116,9,106,10, + 100,11,107,2,144,1,114,50,124,0,160,6,124,2,161,1, + 125,4,116,9,160,11,116,12,124,4,161,2,125,5,116,13, + 124,10,124,5,124,1,124,11,131,4,1,0,110,20,116,14, + 124,10,124,3,124,9,100,12,25,0,124,1,124,11,131,5, + 1,0,87,0,110,24,4,0,116,15,116,16,102,2,144,1, + 121,76,1,0,1,0,1,0,89,0,110,32,48,0,116,17, + 160,18,100,13,124,8,124,2,161,3,1,0,116,19,124,13, + 124,1,124,8,124,2,100,14,141,4,83,0,124,4,100,1, + 117,0,144,1,114,128,124,0,160,6,124,2,161,1,125,4, + 124,0,160,20,124,4,124,2,161,2,125,14,116,17,160,18, + 100,15,124,2,161,2,1,0,116,21,106,22,144,2,115,20, + 124,8,100,1,117,1,144,2,114,20,124,3,100,1,117,1, + 144,2,114,20,124,6,144,1,114,220,124,5,100,1,117,0, + 144,1,114,206,116,9,160,11,124,4,161,1,125,5,116,23, + 124,14,124,5,124,7,131,3,125,10,110,16,116,24,124,14, + 124,3,116,25,124,4,131,1,131,3,125,10,122,18,124,0, + 160,26,124,2,124,8,124,10,161,3,1,0,87,0,110,20, + 4,0,116,2,144,2,121,18,1,0,1,0,1,0,89,0, + 110,2,48,0,124,14,83,0,41,16,122,190,67,111,110,99, + 114,101,116,101,32,105,109,112,108,101,109,101,110,116,97,116, + 105,111,110,32,111,102,32,73,110,115,112,101,99,116,76,111, + 97,100,101,114,46,103,101,116,95,99,111,100,101,46,10,10, + 32,32,32,32,32,32,32,32,82,101,97,100,105,110,103,32, + 111,102,32,98,121,116,101,99,111,100,101,32,114,101,113,117, + 105,114,101,115,32,112,97,116,104,95,115,116,97,116,115,32, + 
116,111,32,98,101,32,105,109,112,108,101,109,101,110,116,101, + 100,46,32,84,111,32,119,114,105,116,101,10,32,32,32,32, + 32,32,32,32,98,121,116,101,99,111,100,101,44,32,115,101, + 116,95,100,97,116,97,32,109,117,115,116,32,97,108,115,111, + 32,98,101,32,105,109,112,108,101,109,101,110,116,101,100,46, + 10,10,32,32,32,32,32,32,32,32,78,70,84,114,169,0, + 0,0,114,159,0,0,0,114,145,0,0,0,114,39,0,0, + 0,114,73,0,0,0,114,28,0,0,0,90,5,110,101,118, + 101,114,90,6,97,108,119,97,121,115,218,4,115,105,122,101, + 122,13,123,125,32,109,97,116,99,104,101,115,32,123,125,41, + 3,114,116,0,0,0,114,106,0,0,0,114,107,0,0,0, + 122,19,99,111,100,101,32,111,98,106,101,99,116,32,102,114, + 111,109,32,123,125,41,27,114,179,0,0,0,114,97,0,0, + 0,114,82,0,0,0,114,224,0,0,0,114,50,0,0,0, + 114,18,0,0,0,114,227,0,0,0,114,152,0,0,0,218, + 10,109,101,109,111,114,121,118,105,101,119,114,163,0,0,0, + 90,21,99,104,101,99,107,95,104,97,115,104,95,98,97,115, + 101,100,95,112,121,99,115,114,157,0,0,0,218,17,95,82, + 65,87,95,77,65,71,73,67,95,78,85,77,66,69,82,114, + 158,0,0,0,114,156,0,0,0,114,117,0,0,0,114,150, + 0,0,0,114,134,0,0,0,114,149,0,0,0,114,165,0, + 0,0,114,233,0,0,0,114,1,0,0,0,218,19,100,111, + 110,116,95,119,114,105,116,101,95,98,121,116,101,99,111,100, + 101,114,171,0,0,0,114,170,0,0,0,114,23,0,0,0, + 114,226,0,0,0,41,15,114,118,0,0,0,114,139,0,0, + 0,114,107,0,0,0,114,154,0,0,0,114,174,0,0,0, + 114,157,0,0,0,90,10,104,97,115,104,95,98,97,115,101, + 100,90,12,99,104,101,99,107,95,115,111,117,114,99,101,114, + 106,0,0,0,218,2,115,116,114,26,0,0,0,114,151,0, + 0,0,114,2,0,0,0,90,10,98,121,116,101,115,95,100, + 97,116,97,90,11,99,111,100,101,95,111,98,106,101,99,116, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 5,1,0,0,12,4,0,0,115,6,0,0,0,8,2,4, - 2,8,16,114,5,1,0,0,99,0,0,0,0,0,0,0, - 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, - 0,115,92,0,0,0,101,0,90,1,100,0,90,2,100,1, - 90,3,100,2,100,3,132,0,90,4,100,4,100,5,132,0, - 90,5,100,6,100,7,132,0,90,6,100,8,100,9,132,0, - 90,7,100,10,100,11,132,0,90,8,100,12,100,13,132,0, - 90,9,100,14,100,15,132,0,90,10,100,16,100,17,132,0, - 90,11,101,12,100,18,100,19,132,0,131,1,90,13,100,20, - 83,0,41,21,114,252,0,0,0,122,93,76,111,97,100,101, - 114,32,102,111,114,32,101,120,116,101,110,115,105,111,110,32, - 109,111,100,117,108,101,115,46,10,10,32,32,32,32,84,104, - 101,32,99,111,110,115,116,114,117,99,116,111,114,32,105,115, - 32,100,101,115,105,103,110,101,100,32,116,111,32,119,111,114, - 107,32,119,105,116,104,32,70,105,108,101,70,105,110,100,101, - 114,46,10,10,32,32,32,32,99,3,0,0,0,0,0,0, - 0,0,0,0,0,3,0,0,0,2,0,0,0,67,0,0, - 0,115,16,0,0,0,124,1,124,0,95,0,124,2,124,0, - 95,1,100,0,83,0,114,109,0,0,0,114,159,0,0,0, - 41,3,114,118,0,0,0,114,116,0,0,0,114,44,0,0, - 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 114,209,0,0,0,49,4,0,0,115,4,0,0,0,0,1, - 6,1,122,28,69,120,116,101,110,115,105,111,110,70,105,108, - 101,76,111,97,100,101,114,46,95,95,105,110,105,116,95,95, - 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, - 0,2,0,0,0,67,0,0,0,115,24,0,0,0,124,0, - 106,0,124,1,106,0,107,2,111,22,124,0,106,1,124,1, - 106,1,107,2,83,0,114,109,0,0,0,114,240,0,0,0, - 114,242,0,0,0,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,114,243,0,0,0,53,4,0,0,115,6,0, - 0,0,0,1,12,1,10,255,122,26,69,120,116,101,110,115, - 105,111,110,70,105,108,101,76,111,97,100,101,114,46,95,95, - 101,113,95,95,99,1,0,0,0,0,0,0,0,0,0,0, - 0,1,0,0,0,3,0,0,0,67,0,0,0,115,20,0, - 0,0,116,0,124,0,106,1,131,1,116,0,124,0,106,2, - 131,1,65,0,83,0,114,109,0,0,0,114,244,0,0,0, - 114,246,0,0,0,114,5,0,0,0,114,5,0,0,0,114, - 
8,0,0,0,114,247,0,0,0,57,4,0,0,115,2,0, - 0,0,0,1,122,28,69,120,116,101,110,115,105,111,110,70, - 105,108,101,76,111,97,100,101,114,46,95,95,104,97,115,104, - 95,95,99,2,0,0,0,0,0,0,0,0,0,0,0,3, - 0,0,0,5,0,0,0,67,0,0,0,115,36,0,0,0, - 116,0,160,1,116,2,106,3,124,1,161,2,125,2,116,0, - 160,4,100,1,124,1,106,5,124,0,106,6,161,3,1,0, - 124,2,83,0,41,2,122,38,67,114,101,97,116,101,32,97, - 110,32,117,110,105,116,105,97,108,105,122,101,100,32,101,120, - 116,101,110,115,105,111,110,32,109,111,100,117,108,101,122,38, - 101,120,116,101,110,115,105,111,110,32,109,111,100,117,108,101, - 32,123,33,114,125,32,108,111,97,100,101,100,32,102,114,111, - 109,32,123,33,114,125,41,7,114,134,0,0,0,114,214,0, - 0,0,114,163,0,0,0,90,14,99,114,101,97,116,101,95, - 100,121,110,97,109,105,99,114,149,0,0,0,114,116,0,0, - 0,114,44,0,0,0,41,3,114,118,0,0,0,114,187,0, - 0,0,114,216,0,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,114,212,0,0,0,60,4,0,0,115, - 14,0,0,0,0,2,4,1,6,255,4,2,6,1,8,255, - 4,2,122,33,69,120,116,101,110,115,105,111,110,70,105,108, - 101,76,111,97,100,101,114,46,99,114,101,97,116,101,95,109, - 111,100,117,108,101,99,2,0,0,0,0,0,0,0,0,0, - 0,0,2,0,0,0,5,0,0,0,67,0,0,0,115,36, - 0,0,0,116,0,160,1,116,2,106,3,124,1,161,2,1, - 0,116,0,160,4,100,1,124,0,106,5,124,0,106,6,161, - 3,1,0,100,2,83,0,41,3,122,30,73,110,105,116,105, - 97,108,105,122,101,32,97,110,32,101,120,116,101,110,115,105, - 111,110,32,109,111,100,117,108,101,122,40,101,120,116,101,110, - 115,105,111,110,32,109,111,100,117,108,101,32,123,33,114,125, - 32,101,120,101,99,117,116,101,100,32,102,114,111,109,32,123, - 33,114,125,78,41,7,114,134,0,0,0,114,214,0,0,0, - 114,163,0,0,0,90,12,101,120,101,99,95,100,121,110,97, - 109,105,99,114,149,0,0,0,114,116,0,0,0,114,44,0, - 0,0,169,2,114,118,0,0,0,114,216,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,114,217,0, - 0,0,68,4,0,0,115,8,0,0,0,0,2,14,1,6, - 1,8,255,122,31,69,120,116,101,110,115,105,111,110,70,105, - 108,101,76,111,97,100,101,114,46,101,120,101,99,95,109,111, - 100,117,108,101,99,2,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,4,0,0,0,3,0,0,0,115,36,0, - 0,0,116,0,124,0,106,1,131,1,100,1,25,0,137,0, - 116,2,135,0,102,1,100,2,100,3,132,8,116,3,68,0, - 131,1,131,1,83,0,41,4,122,49,82,101,116,117,114,110, - 32,84,114,117,101,32,105,102,32,116,104,101,32,101,120,116, - 101,110,115,105,111,110,32,109,111,100,117,108,101,32,105,115, - 32,97,32,112,97,99,107,97,103,101,46,114,39,0,0,0, - 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, - 0,4,0,0,0,51,0,0,0,115,26,0,0,0,124,0, - 93,18,125,1,136,0,100,0,124,1,23,0,107,2,86,0, - 1,0,113,2,100,1,83,0,41,2,114,209,0,0,0,78, - 114,5,0,0,0,169,2,114,32,0,0,0,218,6,115,117, - 102,102,105,120,169,1,90,9,102,105,108,101,95,110,97,109, - 101,114,5,0,0,0,114,8,0,0,0,218,9,60,103,101, - 110,101,120,112,114,62,77,4,0,0,115,4,0,0,0,4, - 1,2,255,122,49,69,120,116,101,110,115,105,111,110,70,105, - 108,101,76,111,97,100,101,114,46,105,115,95,112,97,99,107, - 97,103,101,46,60,108,111,99,97,108,115,62,46,60,103,101, - 110,101,120,112,114,62,41,4,114,47,0,0,0,114,44,0, - 0,0,218,3,97,110,121,218,18,69,88,84,69,78,83,73, - 79,78,95,83,85,70,70,73,88,69,83,114,219,0,0,0, - 114,5,0,0,0,114,9,1,0,0,114,8,0,0,0,114, - 182,0,0,0,74,4,0,0,115,8,0,0,0,0,2,14, - 1,12,1,2,255,122,30,69,120,116,101,110,115,105,111,110, - 70,105,108,101,76,111,97,100,101,114,46,105,115,95,112,97, - 99,107,97,103,101,99,2,0,0,0,0,0,0,0,0,0, - 0,0,2,0,0,0,1,0,0,0,67,0,0,0,115,4, - 0,0,0,100,1,83,0,41,2,122,63,82,101,116,117,114, - 110,32,78,111,110,101,32,97,115,32,97,110,32,101,120,116, - 
101,110,115,105,111,110,32,109,111,100,117,108,101,32,99,97, - 110,110,111,116,32,99,114,101,97,116,101,32,97,32,99,111, - 100,101,32,111,98,106,101,99,116,46,78,114,5,0,0,0, - 114,219,0,0,0,114,5,0,0,0,114,5,0,0,0,114, - 8,0,0,0,114,213,0,0,0,80,4,0,0,115,2,0, - 0,0,0,2,122,28,69,120,116,101,110,115,105,111,110,70, - 105,108,101,76,111,97,100,101,114,46,103,101,116,95,99,111, - 100,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, - 100,1,83,0,41,2,122,53,82,101,116,117,114,110,32,78, - 111,110,101,32,97,115,32,101,120,116,101,110,115,105,111,110, - 32,109,111,100,117,108,101,115,32,104,97,118,101,32,110,111, - 32,115,111,117,114,99,101,32,99,111,100,101,46,78,114,5, - 0,0,0,114,219,0,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,229,0,0,0,84,4,0,0, - 115,2,0,0,0,0,2,122,30,69,120,116,101,110,115,105, - 111,110,70,105,108,101,76,111,97,100,101,114,46,103,101,116, - 95,115,111,117,114,99,101,99,2,0,0,0,0,0,0,0, - 0,0,0,0,2,0,0,0,1,0,0,0,67,0,0,0, - 115,6,0,0,0,124,0,106,0,83,0,114,250,0,0,0, - 114,48,0,0,0,114,219,0,0,0,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,114,179,0,0,0,88,4, - 0,0,115,2,0,0,0,0,3,122,32,69,120,116,101,110, - 115,105,111,110,70,105,108,101,76,111,97,100,101,114,46,103, - 101,116,95,102,105,108,101,110,97,109,101,78,41,14,114,125, - 0,0,0,114,124,0,0,0,114,126,0,0,0,114,127,0, - 0,0,114,209,0,0,0,114,243,0,0,0,114,247,0,0, - 0,114,212,0,0,0,114,217,0,0,0,114,182,0,0,0, - 114,213,0,0,0,114,229,0,0,0,114,136,0,0,0,114, - 179,0,0,0,114,5,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,252,0,0,0,41,4,0, - 0,115,22,0,0,0,8,2,4,6,8,4,8,4,8,3, - 8,8,8,6,8,6,8,4,8,4,2,1,114,252,0,0, - 0,99,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,0,2,0,0,0,64,0,0,0,115,104,0,0,0,101, - 0,90,1,100,0,90,2,100,1,90,3,100,2,100,3,132, - 0,90,4,100,4,100,5,132,0,90,5,100,6,100,7,132, - 0,90,6,100,8,100,9,132,0,90,7,100,10,100,11,132, - 0,90,8,100,12,100,13,132,0,90,9,100,14,100,15,132, - 0,90,10,100,16,100,17,132,0,90,11,100,18,100,19,132, - 0,90,12,100,20,100,21,132,0,90,13,100,22,100,23,132, - 0,90,14,100,24,83,0,41,25,218,14,95,78,97,109,101, - 115,112,97,99,101,80,97,116,104,97,38,1,0,0,82,101, - 112,114,101,115,101,110,116,115,32,97,32,110,97,109,101,115, - 112,97,99,101,32,112,97,99,107,97,103,101,39,115,32,112, - 97,116,104,46,32,32,73,116,32,117,115,101,115,32,116,104, - 101,32,109,111,100,117,108,101,32,110,97,109,101,10,32,32, - 32,32,116,111,32,102,105,110,100,32,105,116,115,32,112,97, - 114,101,110,116,32,109,111,100,117,108,101,44,32,97,110,100, - 32,102,114,111,109,32,116,104,101,114,101,32,105,116,32,108, - 111,111,107,115,32,117,112,32,116,104,101,32,112,97,114,101, - 110,116,39,115,10,32,32,32,32,95,95,112,97,116,104,95, - 95,46,32,32,87,104,101,110,32,116,104,105,115,32,99,104, - 97,110,103,101,115,44,32,116,104,101,32,109,111,100,117,108, - 101,39,115,32,111,119,110,32,112,97,116,104,32,105,115,32, - 114,101,99,111,109,112,117,116,101,100,44,10,32,32,32,32, - 117,115,105,110,103,32,112,97,116,104,95,102,105,110,100,101, - 114,46,32,32,70,111,114,32,116,111,112,45,108,101,118,101, - 108,32,109,111,100,117,108,101,115,44,32,116,104,101,32,112, - 97,114,101,110,116,32,109,111,100,117,108,101,39,115,32,112, - 97,116,104,10,32,32,32,32,105,115,32,115,121,115,46,112, - 97,116,104,46,99,4,0,0,0,0,0,0,0,0,0,0, - 0,4,0,0,0,3,0,0,0,67,0,0,0,115,36,0, - 0,0,124,1,124,0,95,0,124,2,124,0,95,1,116,2, - 124,0,160,3,161,0,131,1,124,0,95,4,124,3,124,0, - 95,5,100,0,83,0,114,109,0,0,0,41,6,218,5,95, - 110,97,109,101,218,5,95,112,97,116,104,114,111,0,0,0, - 
218,16,95,103,101,116,95,112,97,114,101,110,116,95,112,97, - 116,104,218,17,95,108,97,115,116,95,112,97,114,101,110,116, - 95,112,97,116,104,218,12,95,112,97,116,104,95,102,105,110, - 100,101,114,169,4,114,118,0,0,0,114,116,0,0,0,114, - 44,0,0,0,90,11,112,97,116,104,95,102,105,110,100,101, - 114,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 114,209,0,0,0,101,4,0,0,115,8,0,0,0,0,1, - 6,1,6,1,14,1,122,23,95,78,97,109,101,115,112,97, - 99,101,80,97,116,104,46,95,95,105,110,105,116,95,95,99, - 1,0,0,0,0,0,0,0,0,0,0,0,4,0,0,0, - 3,0,0,0,67,0,0,0,115,38,0,0,0,124,0,106, - 0,160,1,100,1,161,1,92,3,125,1,125,2,125,3,124, - 2,100,2,107,2,114,30,100,3,83,0,124,1,100,4,102, - 2,83,0,41,5,122,62,82,101,116,117,114,110,115,32,97, - 32,116,117,112,108,101,32,111,102,32,40,112,97,114,101,110, - 116,45,109,111,100,117,108,101,45,110,97,109,101,44,32,112, - 97,114,101,110,116,45,112,97,116,104,45,97,116,116,114,45, - 110,97,109,101,41,114,71,0,0,0,114,40,0,0,0,41, - 2,114,1,0,0,0,114,44,0,0,0,90,8,95,95,112, - 97,116,104,95,95,41,2,114,14,1,0,0,114,41,0,0, - 0,41,4,114,118,0,0,0,114,4,1,0,0,218,3,100, - 111,116,90,2,109,101,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,23,95,102,105,110,100,95,112,97,114, - 101,110,116,95,112,97,116,104,95,110,97,109,101,115,107,4, - 0,0,115,8,0,0,0,0,2,18,1,8,2,4,3,122, - 38,95,78,97,109,101,115,112,97,99,101,80,97,116,104,46, - 95,102,105,110,100,95,112,97,114,101,110,116,95,112,97,116, - 104,95,110,97,109,101,115,99,1,0,0,0,0,0,0,0, - 0,0,0,0,3,0,0,0,3,0,0,0,67,0,0,0, - 115,28,0,0,0,124,0,160,0,161,0,92,2,125,1,125, - 2,116,1,116,2,106,3,124,1,25,0,124,2,131,2,83, - 0,114,109,0,0,0,41,4,114,21,1,0,0,114,130,0, - 0,0,114,1,0,0,0,218,7,109,111,100,117,108,101,115, - 41,3,114,118,0,0,0,90,18,112,97,114,101,110,116,95, - 109,111,100,117,108,101,95,110,97,109,101,90,14,112,97,116, - 104,95,97,116,116,114,95,110,97,109,101,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,16,1,0,0,117, - 4,0,0,115,4,0,0,0,0,1,12,1,122,31,95,78, - 97,109,101,115,112,97,99,101,80,97,116,104,46,95,103,101, - 116,95,112,97,114,101,110,116,95,112,97,116,104,99,1,0, - 0,0,0,0,0,0,0,0,0,0,3,0,0,0,4,0, - 0,0,67,0,0,0,115,80,0,0,0,116,0,124,0,160, - 1,161,0,131,1,125,1,124,1,124,0,106,2,107,3,114, - 74,124,0,160,3,124,0,106,4,124,1,161,2,125,2,124, - 2,100,0,117,1,114,68,124,2,106,5,100,0,117,0,114, - 68,124,2,106,6,114,68,124,2,106,6,124,0,95,7,124, - 1,124,0,95,2,124,0,106,7,83,0,114,109,0,0,0, - 41,8,114,111,0,0,0,114,16,1,0,0,114,17,1,0, - 0,114,18,1,0,0,114,14,1,0,0,114,140,0,0,0, - 114,178,0,0,0,114,15,1,0,0,41,3,114,118,0,0, - 0,90,11,112,97,114,101,110,116,95,112,97,116,104,114,187, - 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,218,12,95,114,101,99,97,108,99,117,108,97,116,101, - 121,4,0,0,115,16,0,0,0,0,2,12,1,10,1,14, - 3,18,1,6,1,8,1,6,1,122,27,95,78,97,109,101, - 115,112,97,99,101,80,97,116,104,46,95,114,101,99,97,108, - 99,117,108,97,116,101,99,1,0,0,0,0,0,0,0,0, - 0,0,0,1,0,0,0,3,0,0,0,67,0,0,0,115, - 12,0,0,0,116,0,124,0,160,1,161,0,131,1,83,0, - 114,109,0,0,0,41,2,218,4,105,116,101,114,114,23,1, - 0,0,114,246,0,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,218,8,95,95,105,116,101,114,95,95, - 134,4,0,0,115,2,0,0,0,0,1,122,23,95,78,97, - 109,101,115,112,97,99,101,80,97,116,104,46,95,95,105,116, - 101,114,95,95,99,2,0,0,0,0,0,0,0,0,0,0, - 0,2,0,0,0,2,0,0,0,67,0,0,0,115,12,0, - 0,0,124,0,160,0,161,0,124,1,25,0,83,0,114,109, - 0,0,0,169,1,114,23,1,0,0,41,2,114,118,0,0, - 0,218,5,105,110,100,101,120,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,218,11,95,95,103,101,116,105,116, - 
101,109,95,95,137,4,0,0,115,2,0,0,0,0,1,122, - 26,95,78,97,109,101,115,112,97,99,101,80,97,116,104,46, - 95,95,103,101,116,105,116,101,109,95,95,99,3,0,0,0, - 0,0,0,0,0,0,0,0,3,0,0,0,3,0,0,0, - 67,0,0,0,115,14,0,0,0,124,2,124,0,106,0,124, - 1,60,0,100,0,83,0,114,109,0,0,0,41,1,114,15, - 1,0,0,41,3,114,118,0,0,0,114,27,1,0,0,114, - 44,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,11,95,95,115,101,116,105,116,101,109,95,95, - 140,4,0,0,115,2,0,0,0,0,1,122,26,95,78,97, - 109,101,115,112,97,99,101,80,97,116,104,46,95,95,115,101, - 116,105,116,101,109,95,95,99,1,0,0,0,0,0,0,0, - 0,0,0,0,1,0,0,0,3,0,0,0,67,0,0,0, - 115,12,0,0,0,116,0,124,0,160,1,161,0,131,1,83, - 0,114,109,0,0,0,41,2,114,23,0,0,0,114,23,1, - 0,0,114,246,0,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,218,7,95,95,108,101,110,95,95,143, - 4,0,0,115,2,0,0,0,0,1,122,22,95,78,97,109, - 101,115,112,97,99,101,80,97,116,104,46,95,95,108,101,110, - 95,95,99,1,0,0,0,0,0,0,0,0,0,0,0,1, - 0,0,0,3,0,0,0,67,0,0,0,115,12,0,0,0, - 100,1,160,0,124,0,106,1,161,1,83,0,41,2,78,122, - 20,95,78,97,109,101,115,112,97,99,101,80,97,116,104,40, - 123,33,114,125,41,41,2,114,62,0,0,0,114,15,1,0, - 0,114,246,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,8,95,95,114,101,112,114,95,95,146, - 4,0,0,115,2,0,0,0,0,1,122,23,95,78,97,109, - 101,115,112,97,99,101,80,97,116,104,46,95,95,114,101,112, - 114,95,95,99,2,0,0,0,0,0,0,0,0,0,0,0, - 2,0,0,0,3,0,0,0,67,0,0,0,115,12,0,0, - 0,124,1,124,0,160,0,161,0,118,0,83,0,114,109,0, - 0,0,114,26,1,0,0,169,2,114,118,0,0,0,218,4, - 105,116,101,109,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,218,12,95,95,99,111,110,116,97,105,110,115,95, - 95,149,4,0,0,115,2,0,0,0,0,1,122,27,95,78, - 97,109,101,115,112,97,99,101,80,97,116,104,46,95,95,99, - 111,110,116,97,105,110,115,95,95,99,2,0,0,0,0,0, - 0,0,0,0,0,0,2,0,0,0,3,0,0,0,67,0, - 0,0,115,16,0,0,0,124,0,106,0,160,1,124,1,161, - 1,1,0,100,0,83,0,114,109,0,0,0,41,2,114,15, - 1,0,0,114,186,0,0,0,114,32,1,0,0,114,5,0, - 0,0,114,5,0,0,0,114,8,0,0,0,114,186,0,0, - 0,152,4,0,0,115,2,0,0,0,0,1,122,21,95,78, - 97,109,101,115,112,97,99,101,80,97,116,104,46,97,112,112, - 101,110,100,78,41,15,114,125,0,0,0,114,124,0,0,0, - 114,126,0,0,0,114,127,0,0,0,114,209,0,0,0,114, - 21,1,0,0,114,16,1,0,0,114,23,1,0,0,114,25, - 1,0,0,114,28,1,0,0,114,29,1,0,0,114,30,1, - 0,0,114,31,1,0,0,114,34,1,0,0,114,186,0,0, + 213,0,0,0,88,3,0,0,115,152,0,0,0,0,7,10, + 1,4,1,4,1,4,1,4,1,4,1,2,1,12,1,12, + 1,12,2,2,1,14,1,12,1,8,2,12,1,2,1,14, + 1,12,1,6,3,2,1,2,254,6,4,2,1,12,1,16, + 1,12,1,6,1,12,1,12,1,2,255,2,2,8,254,4, + 3,10,1,4,1,2,1,2,254,4,4,8,1,2,255,6, + 3,2,1,2,1,2,1,6,1,2,1,2,251,8,7,18, + 1,6,2,8,1,2,255,4,2,6,1,2,1,2,254,6, + 3,10,1,10,1,12,1,12,1,18,1,6,255,4,2,6, + 1,10,1,10,1,14,2,6,1,6,255,4,2,2,1,18, + 1,14,1,6,1,122,21,83,111,117,114,99,101,76,111,97, + 100,101,114,46,103,101,116,95,99,111,100,101,78,41,10,114, + 125,0,0,0,114,124,0,0,0,114,126,0,0,0,114,223, + 0,0,0,114,224,0,0,0,114,226,0,0,0,114,225,0, + 0,0,114,229,0,0,0,114,233,0,0,0,114,213,0,0, 0,114,5,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,114,13,1,0,0,94,4,0,0,115,24, - 0,0,0,8,1,4,6,8,6,8,10,8,4,8,13,8, - 3,8,3,8,3,8,3,8,3,8,3,114,13,1,0,0, - 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, - 0,3,0,0,0,64,0,0,0,115,80,0,0,0,101,0, - 90,1,100,0,90,2,100,1,100,2,132,0,90,3,101,4, - 100,3,100,4,132,0,131,1,90,5,100,5,100,6,132,0, - 90,6,100,7,100,8,132,0,90,7,100,9,100,10,132,0, - 90,8,100,11,100,12,132,0,90,9,100,13,100,14,132,0, - 90,10,100,15,100,16,132,0,90,11,100,17,83,0,41,18, - 218,16,95,78,97,109,101,115,112,97,99,101,76,111,97,100, - 
101,114,99,4,0,0,0,0,0,0,0,0,0,0,0,4, - 0,0,0,4,0,0,0,67,0,0,0,115,18,0,0,0, - 116,0,124,1,124,2,124,3,131,3,124,0,95,1,100,0, - 83,0,114,109,0,0,0,41,2,114,13,1,0,0,114,15, - 1,0,0,114,19,1,0,0,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,209,0,0,0,158,4,0,0, - 115,2,0,0,0,0,1,122,25,95,78,97,109,101,115,112, - 97,99,101,76,111,97,100,101,114,46,95,95,105,110,105,116, - 95,95,99,2,0,0,0,0,0,0,0,0,0,0,0,2, - 0,0,0,3,0,0,0,67,0,0,0,115,12,0,0,0, - 100,1,160,0,124,1,106,1,161,1,83,0,41,2,122,115, - 82,101,116,117,114,110,32,114,101,112,114,32,102,111,114,32, - 116,104,101,32,109,111,100,117,108,101,46,10,10,32,32,32, - 32,32,32,32,32,84,104,101,32,109,101,116,104,111,100,32, + 114,8,0,0,0,114,221,0,0,0,29,3,0,0,115,14, + 0,0,0,8,2,8,8,8,14,8,10,8,7,8,10,14, + 8,114,221,0,0,0,99,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,4,0,0,0,0,0,0,0,115, + 92,0,0,0,101,0,90,1,100,0,90,2,100,1,90,3, + 100,2,100,3,132,0,90,4,100,4,100,5,132,0,90,5, + 100,6,100,7,132,0,90,6,101,7,135,0,102,1,100,8, + 100,9,132,8,131,1,90,8,101,7,100,10,100,11,132,0, + 131,1,90,9,100,12,100,13,132,0,90,10,101,7,100,14, + 100,15,132,0,131,1,90,11,135,0,4,0,90,12,83,0, + 41,16,218,10,70,105,108,101,76,111,97,100,101,114,122,103, + 66,97,115,101,32,102,105,108,101,32,108,111,97,100,101,114, + 32,99,108,97,115,115,32,119,104,105,99,104,32,105,109,112, + 108,101,109,101,110,116,115,32,116,104,101,32,108,111,97,100, + 101,114,32,112,114,111,116,111,99,111,108,32,109,101,116,104, + 111,100,115,32,116,104,97,116,10,32,32,32,32,114,101,113, + 117,105,114,101,32,102,105,108,101,32,115,121,115,116,101,109, + 32,117,115,97,103,101,46,99,3,0,0,0,0,0,0,0, + 0,0,0,0,3,0,0,0,2,0,0,0,67,0,0,0, + 115,16,0,0,0,124,1,124,0,95,0,124,2,124,0,95, + 1,100,1,83,0,41,2,122,75,67,97,99,104,101,32,116, + 104,101,32,109,111,100,117,108,101,32,110,97,109,101,32,97, + 110,100,32,116,104,101,32,112,97,116,104,32,116,111,32,116, + 104,101,32,102,105,108,101,32,102,111,117,110,100,32,98,121, + 32,116,104,101,10,32,32,32,32,32,32,32,32,102,105,110, + 100,101,114,46,78,114,159,0,0,0,41,3,114,118,0,0, + 0,114,139,0,0,0,114,44,0,0,0,114,5,0,0,0, + 114,5,0,0,0,114,8,0,0,0,114,209,0,0,0,178, + 3,0,0,115,4,0,0,0,0,3,6,1,122,19,70,105, + 108,101,76,111,97,100,101,114,46,95,95,105,110,105,116,95, + 95,99,2,0,0,0,0,0,0,0,0,0,0,0,2,0, + 0,0,2,0,0,0,67,0,0,0,115,24,0,0,0,124, + 0,106,0,124,1,106,0,107,2,111,22,124,0,106,1,124, + 1,106,1,107,2,83,0,114,109,0,0,0,169,2,218,9, + 95,95,99,108,97,115,115,95,95,114,131,0,0,0,169,2, + 114,118,0,0,0,90,5,111,116,104,101,114,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,6,95,95,101, + 113,95,95,184,3,0,0,115,6,0,0,0,0,1,12,1, + 10,255,122,17,70,105,108,101,76,111,97,100,101,114,46,95, + 95,101,113,95,95,99,1,0,0,0,0,0,0,0,0,0, + 0,0,1,0,0,0,3,0,0,0,67,0,0,0,115,20, + 0,0,0,116,0,124,0,106,1,131,1,116,0,124,0,106, + 2,131,1,65,0,83,0,114,109,0,0,0,169,3,218,4, + 104,97,115,104,114,116,0,0,0,114,44,0,0,0,169,1, + 114,118,0,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,218,8,95,95,104,97,115,104,95,95,188,3, + 0,0,115,2,0,0,0,0,1,122,19,70,105,108,101,76, + 111,97,100,101,114,46,95,95,104,97,115,104,95,95,99,2, + 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,3, + 0,0,0,3,0,0,0,115,16,0,0,0,116,0,116,1, + 124,0,131,2,160,2,124,1,161,1,83,0,41,1,122,100, + 76,111,97,100,32,97,32,109,111,100,117,108,101,32,102,114, + 111,109,32,97,32,102,105,108,101,46,10,10,32,32,32,32, + 32,32,32,32,84,104,105,115,32,109,101,116,104,111,100,32, 105,115,32,100,101,112,114,101,99,97,116,101,100,46,32,32, - 84,104,101,32,105,109,112,111,114,116,32,109,97,99,104,105, - 
110,101,114,121,32,100,111,101,115,32,116,104,101,32,106,111, - 98,32,105,116,115,101,108,102,46,10,10,32,32,32,32,32, - 32,32,32,122,25,60,109,111,100,117,108,101,32,123,33,114, - 125,32,40,110,97,109,101,115,112,97,99,101,41,62,41,2, - 114,62,0,0,0,114,125,0,0,0,41,2,114,193,0,0, - 0,114,216,0,0,0,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,218,11,109,111,100,117,108,101,95,114,101, - 112,114,161,4,0,0,115,2,0,0,0,0,7,122,28,95, - 78,97,109,101,115,112,97,99,101,76,111,97,100,101,114,46, - 109,111,100,117,108,101,95,114,101,112,114,99,2,0,0,0, - 0,0,0,0,0,0,0,0,2,0,0,0,1,0,0,0, - 67,0,0,0,115,4,0,0,0,100,1,83,0,41,2,78, - 84,114,5,0,0,0,114,219,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,182,0,0,0,170, - 4,0,0,115,2,0,0,0,0,1,122,27,95,78,97,109, - 101,115,112,97,99,101,76,111,97,100,101,114,46,105,115,95, - 112,97,99,107,97,103,101,99,2,0,0,0,0,0,0,0, - 0,0,0,0,2,0,0,0,1,0,0,0,67,0,0,0, - 115,4,0,0,0,100,1,83,0,41,2,78,114,40,0,0, - 0,114,5,0,0,0,114,219,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,229,0,0,0,173, - 4,0,0,115,2,0,0,0,0,1,122,27,95,78,97,109, - 101,115,112,97,99,101,76,111,97,100,101,114,46,103,101,116, - 95,115,111,117,114,99,101,99,2,0,0,0,0,0,0,0, - 0,0,0,0,2,0,0,0,6,0,0,0,67,0,0,0, - 115,16,0,0,0,116,0,100,1,100,2,100,3,100,4,100, - 5,141,4,83,0,41,6,78,114,40,0,0,0,122,8,60, - 115,116,114,105,110,103,62,114,215,0,0,0,84,41,1,114, - 231,0,0,0,41,1,114,232,0,0,0,114,219,0,0,0, + 85,115,101,32,101,120,101,99,95,109,111,100,117,108,101,40, + 41,32,105,110,115,116,101,97,100,46,10,10,32,32,32,32, + 32,32,32,32,41,3,218,5,115,117,112,101,114,114,239,0, + 0,0,114,220,0,0,0,114,219,0,0,0,169,1,114,241, + 0,0,0,114,5,0,0,0,114,8,0,0,0,114,220,0, + 0,0,191,3,0,0,115,2,0,0,0,0,10,122,22,70, + 105,108,101,76,111,97,100,101,114,46,108,111,97,100,95,109, + 111,100,117,108,101,99,2,0,0,0,0,0,0,0,0,0, + 0,0,2,0,0,0,1,0,0,0,67,0,0,0,115,6, + 0,0,0,124,0,106,0,83,0,169,1,122,58,82,101,116, + 117,114,110,32,116,104,101,32,112,97,116,104,32,116,111,32, + 116,104,101,32,115,111,117,114,99,101,32,102,105,108,101,32, + 97,115,32,102,111,117,110,100,32,98,121,32,116,104,101,32, + 102,105,110,100,101,114,46,114,48,0,0,0,114,219,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,179,0,0,0,203,3,0,0,115,2,0,0,0,0,3, + 122,23,70,105,108,101,76,111,97,100,101,114,46,103,101,116, + 95,102,105,108,101,110,97,109,101,99,2,0,0,0,0,0, + 0,0,0,0,0,0,3,0,0,0,8,0,0,0,67,0, + 0,0,115,126,0,0,0,116,0,124,0,116,1,116,2,102, + 2,131,2,114,70,116,3,160,4,116,5,124,1,131,1,161, + 1,143,24,125,2,124,2,160,6,161,0,87,0,2,0,100, + 1,4,0,4,0,131,3,1,0,83,0,49,0,115,58,48, + 0,1,0,1,0,1,0,89,0,1,0,110,52,116,3,160, + 7,124,1,100,2,161,2,143,24,125,2,124,2,160,6,161, + 0,87,0,2,0,100,1,4,0,4,0,131,3,1,0,83, + 0,49,0,115,112,48,0,1,0,1,0,1,0,89,0,1, + 0,100,1,83,0,41,3,122,39,82,101,116,117,114,110,32, + 116,104,101,32,100,97,116,97,32,102,114,111,109,32,112,97, + 116,104,32,97,115,32,114,97,119,32,98,121,116,101,115,46, + 78,218,1,114,41,8,114,161,0,0,0,114,221,0,0,0, + 218,19,69,120,116,101,110,115,105,111,110,70,105,108,101,76, + 111,97,100,101,114,114,64,0,0,0,90,9,111,112,101,110, + 95,99,111,100,101,114,84,0,0,0,90,4,114,101,97,100, + 114,65,0,0,0,41,3,114,118,0,0,0,114,44,0,0, + 0,114,68,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,114,227,0,0,0,208,3,0,0,115,10, + 0,0,0,0,2,14,1,16,1,40,2,14,1,122,19,70, + 105,108,101,76,111,97,100,101,114,46,103,101,116,95,100,97, + 116,97,99,2,0,0,0,0,0,0,0,0,0,0,0,3, + 0,0,0,2,0,0,0,67,0,0,0,115,20,0,0,0, + 100,1,100,2,108,0,109,1,125,2,1,0,124,2,124,0, + 
131,1,83,0,41,3,78,114,73,0,0,0,41,1,218,10, + 70,105,108,101,82,101,97,100,101,114,41,2,90,17,105,109, + 112,111,114,116,108,105,98,46,114,101,97,100,101,114,115,114, + 253,0,0,0,41,3,114,118,0,0,0,114,216,0,0,0, + 114,253,0,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,218,19,103,101,116,95,114,101,115,111,117,114, + 99,101,95,114,101,97,100,101,114,217,3,0,0,115,4,0, + 0,0,0,2,12,1,122,30,70,105,108,101,76,111,97,100, + 101,114,46,103,101,116,95,114,101,115,111,117,114,99,101,95, + 114,101,97,100,101,114,41,13,114,125,0,0,0,114,124,0, + 0,0,114,126,0,0,0,114,127,0,0,0,114,209,0,0, + 0,114,243,0,0,0,114,247,0,0,0,114,136,0,0,0, + 114,220,0,0,0,114,179,0,0,0,114,227,0,0,0,114, + 254,0,0,0,90,13,95,95,99,108,97,115,115,99,101,108, + 108,95,95,114,5,0,0,0,114,5,0,0,0,114,249,0, + 0,0,114,8,0,0,0,114,239,0,0,0,173,3,0,0, + 115,22,0,0,0,8,2,4,3,8,6,8,4,8,3,2, + 1,14,11,2,1,10,4,8,9,2,1,114,239,0,0,0, + 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,3,0,0,0,64,0,0,0,115,46,0,0,0,101,0, + 90,1,100,0,90,2,100,1,90,3,100,2,100,3,132,0, + 90,4,100,4,100,5,132,0,90,5,100,6,100,7,156,1, + 100,8,100,9,132,2,90,6,100,10,83,0,41,11,218,16, + 83,111,117,114,99,101,70,105,108,101,76,111,97,100,101,114, + 122,62,67,111,110,99,114,101,116,101,32,105,109,112,108,101, + 109,101,110,116,97,116,105,111,110,32,111,102,32,83,111,117, + 114,99,101,76,111,97,100,101,114,32,117,115,105,110,103,32, + 116,104,101,32,102,105,108,101,32,115,121,115,116,101,109,46, + 99,2,0,0,0,0,0,0,0,0,0,0,0,3,0,0, + 0,3,0,0,0,67,0,0,0,115,22,0,0,0,116,0, + 124,1,131,1,125,2,124,2,106,1,124,2,106,2,100,1, + 156,2,83,0,41,2,122,33,82,101,116,117,114,110,32,116, + 104,101,32,109,101,116,97,100,97,116,97,32,102,111,114,32, + 116,104,101,32,112,97,116,104,46,41,2,114,169,0,0,0, + 114,234,0,0,0,41,3,114,49,0,0,0,218,8,115,116, + 95,109,116,105,109,101,90,7,115,116,95,115,105,122,101,41, + 3,114,118,0,0,0,114,44,0,0,0,114,238,0,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 213,0,0,0,176,4,0,0,115,2,0,0,0,0,1,122, - 25,95,78,97,109,101,115,112,97,99,101,76,111,97,100,101, + 224,0,0,0,227,3,0,0,115,4,0,0,0,0,2,8, + 1,122,27,83,111,117,114,99,101,70,105,108,101,76,111,97, + 100,101,114,46,112,97,116,104,95,115,116,97,116,115,99,4, + 0,0,0,0,0,0,0,0,0,0,0,5,0,0,0,5, + 0,0,0,67,0,0,0,115,24,0,0,0,116,0,124,1, + 131,1,125,4,124,0,106,1,124,2,124,3,124,4,100,1, + 141,3,83,0,41,2,78,169,1,218,5,95,109,111,100,101, + 41,2,114,114,0,0,0,114,225,0,0,0,41,5,114,118, + 0,0,0,114,107,0,0,0,114,106,0,0,0,114,26,0, + 0,0,114,52,0,0,0,114,5,0,0,0,114,5,0,0, + 0,114,8,0,0,0,114,226,0,0,0,232,3,0,0,115, + 4,0,0,0,0,2,8,1,122,32,83,111,117,114,99,101, + 70,105,108,101,76,111,97,100,101,114,46,95,99,97,99,104, + 101,95,98,121,116,101,99,111,100,101,114,60,0,0,0,114, + 1,1,0,0,99,3,0,0,0,0,0,0,0,1,0,0, + 0,9,0,0,0,11,0,0,0,67,0,0,0,115,252,0, + 0,0,116,0,124,1,131,1,92,2,125,4,125,5,103,0, + 125,6,124,4,114,52,116,1,124,4,131,1,115,52,116,0, + 124,4,131,1,92,2,125,4,125,7,124,6,160,2,124,7, + 161,1,1,0,113,16,116,3,124,6,131,1,68,0,93,104, + 125,7,116,4,124,4,124,7,131,2,125,4,122,14,116,5, + 160,6,124,4,161,1,1,0,87,0,113,60,4,0,116,7, + 121,110,1,0,1,0,1,0,89,0,113,60,89,0,113,60, + 4,0,116,8,121,162,1,0,125,8,1,0,122,30,116,9, + 160,10,100,1,124,4,124,8,161,3,1,0,87,0,89,0, + 100,2,125,8,126,8,1,0,100,2,83,0,100,2,125,8, + 126,8,48,0,48,0,113,60,122,28,116,11,124,1,124,2, + 124,3,131,3,1,0,116,9,160,10,100,3,124,1,161,2, + 1,0,87,0,110,52,4,0,116,8,144,0,121,246,1,0, + 125,8,1,0,122,26,116,9,160,10,100,1,124,1,124,8, + 161,3,1,0,87,0,89,0,100,2,125,8,126,8,110,10, + 
100,2,125,8,126,8,48,0,48,0,100,2,83,0,41,4, + 122,27,87,114,105,116,101,32,98,121,116,101,115,32,100,97, + 116,97,32,116,111,32,97,32,102,105,108,101,46,122,27,99, + 111,117,108,100,32,110,111,116,32,99,114,101,97,116,101,32, + 123,33,114,125,58,32,123,33,114,125,78,122,12,99,114,101, + 97,116,101,100,32,123,33,114,125,41,12,114,47,0,0,0, + 114,56,0,0,0,114,186,0,0,0,114,42,0,0,0,114, + 38,0,0,0,114,4,0,0,0,90,5,109,107,100,105,114, + 218,15,70,105,108,101,69,120,105,115,116,115,69,114,114,111, + 114,114,50,0,0,0,114,134,0,0,0,114,149,0,0,0, + 114,69,0,0,0,41,9,114,118,0,0,0,114,44,0,0, + 0,114,26,0,0,0,114,2,1,0,0,218,6,112,97,114, + 101,110,116,114,96,0,0,0,114,37,0,0,0,114,33,0, + 0,0,114,228,0,0,0,114,5,0,0,0,114,5,0,0, + 0,114,8,0,0,0,114,225,0,0,0,237,3,0,0,115, + 46,0,0,0,0,2,12,1,4,2,12,1,12,1,12,2, + 12,1,10,1,2,1,14,1,12,2,8,1,14,3,6,1, + 4,255,4,2,28,1,2,1,12,1,16,1,16,2,8,1, + 2,255,122,25,83,111,117,114,99,101,70,105,108,101,76,111, + 97,100,101,114,46,115,101,116,95,100,97,116,97,78,41,7, + 114,125,0,0,0,114,124,0,0,0,114,126,0,0,0,114, + 127,0,0,0,114,224,0,0,0,114,226,0,0,0,114,225, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,255,0,0,0,223,3,0,0, + 115,8,0,0,0,8,2,4,2,8,5,8,5,114,255,0, + 0,0,99,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,0,0,2,0,0,0,64,0,0,0,115,32,0,0,0, + 101,0,90,1,100,0,90,2,100,1,90,3,100,2,100,3, + 132,0,90,4,100,4,100,5,132,0,90,5,100,6,83,0, + 41,7,218,20,83,111,117,114,99,101,108,101,115,115,70,105, + 108,101,76,111,97,100,101,114,122,45,76,111,97,100,101,114, + 32,119,104,105,99,104,32,104,97,110,100,108,101,115,32,115, + 111,117,114,99,101,108,101,115,115,32,102,105,108,101,32,105, + 109,112,111,114,116,115,46,99,2,0,0,0,0,0,0,0, + 0,0,0,0,5,0,0,0,5,0,0,0,67,0,0,0, + 115,68,0,0,0,124,0,160,0,124,1,161,1,125,2,124, + 0,160,1,124,2,161,1,125,3,124,1,124,2,100,1,156, + 2,125,4,116,2,124,3,124,1,124,4,131,3,1,0,116, + 3,116,4,124,3,131,1,100,2,100,0,133,2,25,0,124, + 1,124,2,100,3,141,3,83,0,41,4,78,114,159,0,0, + 0,114,145,0,0,0,41,2,114,116,0,0,0,114,106,0, + 0,0,41,5,114,179,0,0,0,114,227,0,0,0,114,152, + 0,0,0,114,165,0,0,0,114,235,0,0,0,41,5,114, + 118,0,0,0,114,139,0,0,0,114,44,0,0,0,114,26, + 0,0,0,114,151,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,213,0,0,0,16,4,0,0, + 115,22,0,0,0,0,1,10,1,10,4,2,1,2,254,6, + 4,12,1,2,1,14,1,2,1,2,253,122,29,83,111,117, + 114,99,101,108,101,115,115,70,105,108,101,76,111,97,100,101, 114,46,103,101,116,95,99,111,100,101,99,2,0,0,0,0, 0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,67, - 0,0,0,115,4,0,0,0,100,1,83,0,114,210,0,0, - 0,114,5,0,0,0,114,211,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,212,0,0,0,179, - 4,0,0,115,2,0,0,0,0,1,122,30,95,78,97,109, - 101,115,112,97,99,101,76,111,97,100,101,114,46,99,114,101, - 97,116,101,95,109,111,100,117,108,101,99,2,0,0,0,0, + 0,0,0,115,4,0,0,0,100,1,83,0,41,2,122,39, + 82,101,116,117,114,110,32,78,111,110,101,32,97,115,32,116, + 104,101,114,101,32,105,115,32,110,111,32,115,111,117,114,99, + 101,32,99,111,100,101,46,78,114,5,0,0,0,114,219,0, + 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, + 0,114,229,0,0,0,32,4,0,0,115,2,0,0,0,0, + 2,122,31,83,111,117,114,99,101,108,101,115,115,70,105,108, + 101,76,111,97,100,101,114,46,103,101,116,95,115,111,117,114, + 99,101,78,41,6,114,125,0,0,0,114,124,0,0,0,114, + 126,0,0,0,114,127,0,0,0,114,213,0,0,0,114,229, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,5,1,0,0,12,4,0,0, + 115,6,0,0,0,8,2,4,2,8,16,114,5,1,0,0, + 99,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, + 0,3,0,0,0,64,0,0,0,115,92,0,0,0,101,0, + 
90,1,100,0,90,2,100,1,90,3,100,2,100,3,132,0, + 90,4,100,4,100,5,132,0,90,5,100,6,100,7,132,0, + 90,6,100,8,100,9,132,0,90,7,100,10,100,11,132,0, + 90,8,100,12,100,13,132,0,90,9,100,14,100,15,132,0, + 90,10,100,16,100,17,132,0,90,11,101,12,100,18,100,19, + 132,0,131,1,90,13,100,20,83,0,41,21,114,252,0,0, + 0,122,93,76,111,97,100,101,114,32,102,111,114,32,101,120, + 116,101,110,115,105,111,110,32,109,111,100,117,108,101,115,46, + 10,10,32,32,32,32,84,104,101,32,99,111,110,115,116,114, + 117,99,116,111,114,32,105,115,32,100,101,115,105,103,110,101, + 100,32,116,111,32,119,111,114,107,32,119,105,116,104,32,70, + 105,108,101,70,105,110,100,101,114,46,10,10,32,32,32,32, + 99,3,0,0,0,0,0,0,0,0,0,0,0,3,0,0, + 0,2,0,0,0,67,0,0,0,115,16,0,0,0,124,1, + 124,0,95,0,124,2,124,0,95,1,100,0,83,0,114,109, + 0,0,0,114,159,0,0,0,41,3,114,118,0,0,0,114, + 116,0,0,0,114,44,0,0,0,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,114,209,0,0,0,49,4,0, + 0,115,4,0,0,0,0,1,6,1,122,28,69,120,116,101, + 110,115,105,111,110,70,105,108,101,76,111,97,100,101,114,46, + 95,95,105,110,105,116,95,95,99,2,0,0,0,0,0,0, + 0,0,0,0,0,2,0,0,0,2,0,0,0,67,0,0, + 0,115,24,0,0,0,124,0,106,0,124,1,106,0,107,2, + 111,22,124,0,106,1,124,1,106,1,107,2,83,0,114,109, + 0,0,0,114,240,0,0,0,114,242,0,0,0,114,5,0, + 0,0,114,5,0,0,0,114,8,0,0,0,114,243,0,0, + 0,53,4,0,0,115,6,0,0,0,0,1,12,1,10,255, + 122,26,69,120,116,101,110,115,105,111,110,70,105,108,101,76, + 111,97,100,101,114,46,95,95,101,113,95,95,99,1,0,0, + 0,0,0,0,0,0,0,0,0,1,0,0,0,3,0,0, + 0,67,0,0,0,115,20,0,0,0,116,0,124,0,106,1, + 131,1,116,0,124,0,106,2,131,1,65,0,83,0,114,109, + 0,0,0,114,244,0,0,0,114,246,0,0,0,114,5,0, + 0,0,114,5,0,0,0,114,8,0,0,0,114,247,0,0, + 0,57,4,0,0,115,2,0,0,0,0,1,122,28,69,120, + 116,101,110,115,105,111,110,70,105,108,101,76,111,97,100,101, + 114,46,95,95,104,97,115,104,95,95,99,2,0,0,0,0, + 0,0,0,0,0,0,0,3,0,0,0,5,0,0,0,67, + 0,0,0,115,36,0,0,0,116,0,160,1,116,2,106,3, + 124,1,161,2,125,2,116,0,160,4,100,1,124,1,106,5, + 124,0,106,6,161,3,1,0,124,2,83,0,41,2,122,38, + 67,114,101,97,116,101,32,97,110,32,117,110,105,116,105,97, + 108,105,122,101,100,32,101,120,116,101,110,115,105,111,110,32, + 109,111,100,117,108,101,122,38,101,120,116,101,110,115,105,111, + 110,32,109,111,100,117,108,101,32,123,33,114,125,32,108,111, + 97,100,101,100,32,102,114,111,109,32,123,33,114,125,41,7, + 114,134,0,0,0,114,214,0,0,0,114,163,0,0,0,90, + 14,99,114,101,97,116,101,95,100,121,110,97,109,105,99,114, + 149,0,0,0,114,116,0,0,0,114,44,0,0,0,41,3, + 114,118,0,0,0,114,187,0,0,0,114,216,0,0,0,114, + 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,212, + 0,0,0,60,4,0,0,115,14,0,0,0,0,2,4,1, + 6,255,4,2,6,1,8,255,4,2,122,33,69,120,116,101, + 110,115,105,111,110,70,105,108,101,76,111,97,100,101,114,46, + 99,114,101,97,116,101,95,109,111,100,117,108,101,99,2,0, + 0,0,0,0,0,0,0,0,0,0,2,0,0,0,5,0, + 0,0,67,0,0,0,115,36,0,0,0,116,0,160,1,116, + 2,106,3,124,1,161,2,1,0,116,0,160,4,100,1,124, + 0,106,5,124,0,106,6,161,3,1,0,100,2,83,0,41, + 3,122,30,73,110,105,116,105,97,108,105,122,101,32,97,110, + 32,101,120,116,101,110,115,105,111,110,32,109,111,100,117,108, + 101,122,40,101,120,116,101,110,115,105,111,110,32,109,111,100, + 117,108,101,32,123,33,114,125,32,101,120,101,99,117,116,101, + 100,32,102,114,111,109,32,123,33,114,125,78,41,7,114,134, + 0,0,0,114,214,0,0,0,114,163,0,0,0,90,12,101, + 120,101,99,95,100,121,110,97,109,105,99,114,149,0,0,0, + 114,116,0,0,0,114,44,0,0,0,169,2,114,118,0,0, + 0,114,216,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,114,217,0,0,0,68,4,0,0,115,8, + 
0,0,0,0,2,14,1,6,1,8,255,122,31,69,120,116, + 101,110,115,105,111,110,70,105,108,101,76,111,97,100,101,114, + 46,101,120,101,99,95,109,111,100,117,108,101,99,2,0,0, + 0,0,0,0,0,0,0,0,0,2,0,0,0,4,0,0, + 0,3,0,0,0,115,36,0,0,0,116,0,124,0,106,1, + 131,1,100,1,25,0,137,0,116,2,135,0,102,1,100,2, + 100,3,132,8,116,3,68,0,131,1,131,1,83,0,41,4, + 122,49,82,101,116,117,114,110,32,84,114,117,101,32,105,102, + 32,116,104,101,32,101,120,116,101,110,115,105,111,110,32,109, + 111,100,117,108,101,32,105,115,32,97,32,112,97,99,107,97, + 103,101,46,114,39,0,0,0,99,1,0,0,0,0,0,0, + 0,0,0,0,0,2,0,0,0,4,0,0,0,51,0,0, + 0,115,26,0,0,0,124,0,93,18,125,1,136,0,100,0, + 124,1,23,0,107,2,86,0,1,0,113,2,100,1,83,0, + 41,2,114,209,0,0,0,78,114,5,0,0,0,169,2,114, + 32,0,0,0,218,6,115,117,102,102,105,120,169,1,90,9, + 102,105,108,101,95,110,97,109,101,114,5,0,0,0,114,8, + 0,0,0,218,9,60,103,101,110,101,120,112,114,62,77,4, + 0,0,115,4,0,0,0,4,1,2,255,122,49,69,120,116, + 101,110,115,105,111,110,70,105,108,101,76,111,97,100,101,114, + 46,105,115,95,112,97,99,107,97,103,101,46,60,108,111,99, + 97,108,115,62,46,60,103,101,110,101,120,112,114,62,41,4, + 114,47,0,0,0,114,44,0,0,0,218,3,97,110,121,218, + 18,69,88,84,69,78,83,73,79,78,95,83,85,70,70,73, + 88,69,83,114,219,0,0,0,114,5,0,0,0,114,9,1, + 0,0,114,8,0,0,0,114,182,0,0,0,74,4,0,0, + 115,8,0,0,0,0,2,14,1,12,1,2,255,122,30,69, + 120,116,101,110,115,105,111,110,70,105,108,101,76,111,97,100, + 101,114,46,105,115,95,112,97,99,107,97,103,101,99,2,0, + 0,0,0,0,0,0,0,0,0,0,2,0,0,0,1,0, + 0,0,67,0,0,0,115,4,0,0,0,100,1,83,0,41, + 2,122,63,82,101,116,117,114,110,32,78,111,110,101,32,97, + 115,32,97,110,32,101,120,116,101,110,115,105,111,110,32,109, + 111,100,117,108,101,32,99,97,110,110,111,116,32,99,114,101, + 97,116,101,32,97,32,99,111,100,101,32,111,98,106,101,99, + 116,46,78,114,5,0,0,0,114,219,0,0,0,114,5,0, + 0,0,114,5,0,0,0,114,8,0,0,0,114,213,0,0, + 0,80,4,0,0,115,2,0,0,0,0,2,122,28,69,120, + 116,101,110,115,105,111,110,70,105,108,101,76,111,97,100,101, + 114,46,103,101,116,95,99,111,100,101,99,2,0,0,0,0, 0,0,0,0,0,0,0,2,0,0,0,1,0,0,0,67, - 0,0,0,115,4,0,0,0,100,0,83,0,114,109,0,0, - 0,114,5,0,0,0,114,6,1,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,217,0,0,0,182, - 4,0,0,115,2,0,0,0,0,1,122,28,95,78,97,109, - 101,115,112,97,99,101,76,111,97,100,101,114,46,101,120,101, - 99,95,109,111,100,117,108,101,99,2,0,0,0,0,0,0, - 0,0,0,0,0,2,0,0,0,4,0,0,0,67,0,0, - 0,115,26,0,0,0,116,0,160,1,100,1,124,0,106,2, - 161,2,1,0,116,0,160,3,124,0,124,1,161,2,83,0, - 41,2,122,98,76,111,97,100,32,97,32,110,97,109,101,115, - 112,97,99,101,32,109,111,100,117,108,101,46,10,10,32,32, - 32,32,32,32,32,32,84,104,105,115,32,109,101,116,104,111, - 100,32,105,115,32,100,101,112,114,101,99,97,116,101,100,46, - 32,32,85,115,101,32,101,120,101,99,95,109,111,100,117,108, - 101,40,41,32,105,110,115,116,101,97,100,46,10,10,32,32, - 32,32,32,32,32,32,122,38,110,97,109,101,115,112,97,99, - 101,32,109,111,100,117,108,101,32,108,111,97,100,101,100,32, - 119,105,116,104,32,112,97,116,104,32,123,33,114,125,41,4, - 114,134,0,0,0,114,149,0,0,0,114,15,1,0,0,114, - 218,0,0,0,114,219,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,220,0,0,0,185,4,0, - 0,115,8,0,0,0,0,7,6,1,4,255,4,2,122,28, - 95,78,97,109,101,115,112,97,99,101,76,111,97,100,101,114, - 46,108,111,97,100,95,109,111,100,117,108,101,78,41,12,114, - 125,0,0,0,114,124,0,0,0,114,126,0,0,0,114,209, - 0,0,0,114,207,0,0,0,114,36,1,0,0,114,182,0, - 0,0,114,229,0,0,0,114,213,0,0,0,114,212,0,0, - 0,114,217,0,0,0,114,220,0,0,0,114,5,0,0,0, + 
0,0,0,115,4,0,0,0,100,1,83,0,41,2,122,53, + 82,101,116,117,114,110,32,78,111,110,101,32,97,115,32,101, + 120,116,101,110,115,105,111,110,32,109,111,100,117,108,101,115, + 32,104,97,118,101,32,110,111,32,115,111,117,114,99,101,32, + 99,111,100,101,46,78,114,5,0,0,0,114,219,0,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 35,1,0,0,157,4,0,0,115,18,0,0,0,8,1,8, - 3,2,1,10,8,8,3,8,3,8,3,8,3,8,3,114, - 35,1,0,0,99,0,0,0,0,0,0,0,0,0,0,0, - 0,0,0,0,0,4,0,0,0,64,0,0,0,115,118,0, - 0,0,101,0,90,1,100,0,90,2,100,1,90,3,101,4, - 100,2,100,3,132,0,131,1,90,5,101,4,100,4,100,5, - 132,0,131,1,90,6,101,4,100,6,100,7,132,0,131,1, - 90,7,101,4,100,8,100,9,132,0,131,1,90,8,101,4, - 100,19,100,11,100,12,132,1,131,1,90,9,101,4,100,20, - 100,13,100,14,132,1,131,1,90,10,101,4,100,21,100,15, - 100,16,132,1,131,1,90,11,101,4,100,17,100,18,132,0, - 131,1,90,12,100,10,83,0,41,22,218,10,80,97,116,104, - 70,105,110,100,101,114,122,62,77,101,116,97,32,112,97,116, - 104,32,102,105,110,100,101,114,32,102,111,114,32,115,121,115, - 46,112,97,116,104,32,97,110,100,32,112,97,99,107,97,103, - 101,32,95,95,112,97,116,104,95,95,32,97,116,116,114,105, - 98,117,116,101,115,46,99,1,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,4,0,0,0,67,0,0,0,115, - 64,0,0,0,116,0,116,1,106,2,160,3,161,0,131,1, - 68,0,93,44,92,2,125,1,125,2,124,2,100,1,117,0, - 114,40,116,1,106,2,124,1,61,0,113,14,116,4,124,2, - 100,2,131,2,114,14,124,2,160,5,161,0,1,0,113,14, - 100,1,83,0,41,3,122,125,67,97,108,108,32,116,104,101, - 32,105,110,118,97,108,105,100,97,116,101,95,99,97,99,104, - 101,115,40,41,32,109,101,116,104,111,100,32,111,110,32,97, - 108,108,32,112,97,116,104,32,101,110,116,114,121,32,102,105, - 110,100,101,114,115,10,32,32,32,32,32,32,32,32,115,116, - 111,114,101,100,32,105,110,32,115,121,115,46,112,97,116,104, - 95,105,109,112,111,114,116,101,114,95,99,97,99,104,101,115, - 32,40,119,104,101,114,101,32,105,109,112,108,101,109,101,110, - 116,101,100,41,46,78,218,17,105,110,118,97,108,105,100,97, - 116,101,95,99,97,99,104,101,115,41,6,218,4,108,105,115, - 116,114,1,0,0,0,218,19,112,97,116,104,95,105,109,112, - 111,114,116,101,114,95,99,97,99,104,101,218,5,105,116,101, - 109,115,114,128,0,0,0,114,38,1,0,0,41,3,114,193, - 0,0,0,114,116,0,0,0,218,6,102,105,110,100,101,114, + 229,0,0,0,84,4,0,0,115,2,0,0,0,0,2,122, + 30,69,120,116,101,110,115,105,111,110,70,105,108,101,76,111, + 97,100,101,114,46,103,101,116,95,115,111,117,114,99,101,99, + 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, + 1,0,0,0,67,0,0,0,115,6,0,0,0,124,0,106, + 0,83,0,114,250,0,0,0,114,48,0,0,0,114,219,0, + 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, + 0,114,179,0,0,0,88,4,0,0,115,2,0,0,0,0, + 3,122,32,69,120,116,101,110,115,105,111,110,70,105,108,101, + 76,111,97,100,101,114,46,103,101,116,95,102,105,108,101,110, + 97,109,101,78,41,14,114,125,0,0,0,114,124,0,0,0, + 114,126,0,0,0,114,127,0,0,0,114,209,0,0,0,114, + 243,0,0,0,114,247,0,0,0,114,212,0,0,0,114,217, + 0,0,0,114,182,0,0,0,114,213,0,0,0,114,229,0, + 0,0,114,136,0,0,0,114,179,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,252,0,0,0,41,4,0,0,115,22,0,0,0,8,2, + 4,6,8,4,8,4,8,3,8,8,8,6,8,6,8,4, + 8,4,2,1,114,252,0,0,0,99,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,0,2,0,0,0,64,0, + 0,0,115,104,0,0,0,101,0,90,1,100,0,90,2,100, + 1,90,3,100,2,100,3,132,0,90,4,100,4,100,5,132, + 0,90,5,100,6,100,7,132,0,90,6,100,8,100,9,132, + 0,90,7,100,10,100,11,132,0,90,8,100,12,100,13,132, + 0,90,9,100,14,100,15,132,0,90,10,100,16,100,17,132, + 0,90,11,100,18,100,19,132,0,90,12,100,20,100,21,132, + 0,90,13,100,22,100,23,132,0,90,14,100,24,83,0,41, + 
25,218,14,95,78,97,109,101,115,112,97,99,101,80,97,116, + 104,97,38,1,0,0,82,101,112,114,101,115,101,110,116,115, + 32,97,32,110,97,109,101,115,112,97,99,101,32,112,97,99, + 107,97,103,101,39,115,32,112,97,116,104,46,32,32,73,116, + 32,117,115,101,115,32,116,104,101,32,109,111,100,117,108,101, + 32,110,97,109,101,10,32,32,32,32,116,111,32,102,105,110, + 100,32,105,116,115,32,112,97,114,101,110,116,32,109,111,100, + 117,108,101,44,32,97,110,100,32,102,114,111,109,32,116,104, + 101,114,101,32,105,116,32,108,111,111,107,115,32,117,112,32, + 116,104,101,32,112,97,114,101,110,116,39,115,10,32,32,32, + 32,95,95,112,97,116,104,95,95,46,32,32,87,104,101,110, + 32,116,104,105,115,32,99,104,97,110,103,101,115,44,32,116, + 104,101,32,109,111,100,117,108,101,39,115,32,111,119,110,32, + 112,97,116,104,32,105,115,32,114,101,99,111,109,112,117,116, + 101,100,44,10,32,32,32,32,117,115,105,110,103,32,112,97, + 116,104,95,102,105,110,100,101,114,46,32,32,70,111,114,32, + 116,111,112,45,108,101,118,101,108,32,109,111,100,117,108,101, + 115,44,32,116,104,101,32,112,97,114,101,110,116,32,109,111, + 100,117,108,101,39,115,32,112,97,116,104,10,32,32,32,32, + 105,115,32,115,121,115,46,112,97,116,104,46,99,4,0,0, + 0,0,0,0,0,0,0,0,0,4,0,0,0,3,0,0, + 0,67,0,0,0,115,36,0,0,0,124,1,124,0,95,0, + 124,2,124,0,95,1,116,2,124,0,160,3,161,0,131,1, + 124,0,95,4,124,3,124,0,95,5,100,0,83,0,114,109, + 0,0,0,41,6,218,5,95,110,97,109,101,218,5,95,112, + 97,116,104,114,111,0,0,0,218,16,95,103,101,116,95,112, + 97,114,101,110,116,95,112,97,116,104,218,17,95,108,97,115, + 116,95,112,97,114,101,110,116,95,112,97,116,104,218,12,95, + 112,97,116,104,95,102,105,110,100,101,114,169,4,114,118,0, + 0,0,114,116,0,0,0,114,44,0,0,0,90,11,112,97, + 116,104,95,102,105,110,100,101,114,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,114,209,0,0,0,101,4,0, + 0,115,8,0,0,0,0,1,6,1,6,1,14,1,122,23, + 95,78,97,109,101,115,112,97,99,101,80,97,116,104,46,95, + 95,105,110,105,116,95,95,99,1,0,0,0,0,0,0,0, + 0,0,0,0,4,0,0,0,3,0,0,0,67,0,0,0, + 115,38,0,0,0,124,0,106,0,160,1,100,1,161,1,92, + 3,125,1,125,2,125,3,124,2,100,2,107,2,114,30,100, + 3,83,0,124,1,100,4,102,2,83,0,41,5,122,62,82, + 101,116,117,114,110,115,32,97,32,116,117,112,108,101,32,111, + 102,32,40,112,97,114,101,110,116,45,109,111,100,117,108,101, + 45,110,97,109,101,44,32,112,97,114,101,110,116,45,112,97, + 116,104,45,97,116,116,114,45,110,97,109,101,41,114,71,0, + 0,0,114,40,0,0,0,41,2,114,1,0,0,0,114,44, + 0,0,0,90,8,95,95,112,97,116,104,95,95,41,2,114, + 14,1,0,0,114,41,0,0,0,41,4,114,118,0,0,0, + 114,4,1,0,0,218,3,100,111,116,90,2,109,101,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,23,95, + 102,105,110,100,95,112,97,114,101,110,116,95,112,97,116,104, + 95,110,97,109,101,115,107,4,0,0,115,8,0,0,0,0, + 2,18,1,8,2,4,3,122,38,95,78,97,109,101,115,112, + 97,99,101,80,97,116,104,46,95,102,105,110,100,95,112,97, + 114,101,110,116,95,112,97,116,104,95,110,97,109,101,115,99, + 1,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0, + 3,0,0,0,67,0,0,0,115,28,0,0,0,124,0,160, + 0,161,0,92,2,125,1,125,2,116,1,116,2,106,3,124, + 1,25,0,124,2,131,2,83,0,114,109,0,0,0,41,4, + 114,21,1,0,0,114,130,0,0,0,114,1,0,0,0,218, + 7,109,111,100,117,108,101,115,41,3,114,118,0,0,0,90, + 18,112,97,114,101,110,116,95,109,111,100,117,108,101,95,110, + 97,109,101,90,14,112,97,116,104,95,97,116,116,114,95,110, + 97,109,101,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,16,1,0,0,117,4,0,0,115,4,0,0,0, + 0,1,12,1,122,31,95,78,97,109,101,115,112,97,99,101, + 80,97,116,104,46,95,103,101,116,95,112,97,114,101,110,116, + 
95,112,97,116,104,99,1,0,0,0,0,0,0,0,0,0, + 0,0,3,0,0,0,4,0,0,0,67,0,0,0,115,80, + 0,0,0,116,0,124,0,160,1,161,0,131,1,125,1,124, + 1,124,0,106,2,107,3,114,74,124,0,160,3,124,0,106, + 4,124,1,161,2,125,2,124,2,100,0,117,1,114,68,124, + 2,106,5,100,0,117,0,114,68,124,2,106,6,114,68,124, + 2,106,6,124,0,95,7,124,1,124,0,95,2,124,0,106, + 7,83,0,114,109,0,0,0,41,8,114,111,0,0,0,114, + 16,1,0,0,114,17,1,0,0,114,18,1,0,0,114,14, + 1,0,0,114,140,0,0,0,114,178,0,0,0,114,15,1, + 0,0,41,3,114,118,0,0,0,90,11,112,97,114,101,110, + 116,95,112,97,116,104,114,187,0,0,0,114,5,0,0,0, + 114,5,0,0,0,114,8,0,0,0,218,12,95,114,101,99, + 97,108,99,117,108,97,116,101,121,4,0,0,115,16,0,0, + 0,0,2,12,1,10,1,14,3,18,1,6,1,8,1,6, + 1,122,27,95,78,97,109,101,115,112,97,99,101,80,97,116, + 104,46,95,114,101,99,97,108,99,117,108,97,116,101,99,1, + 0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,3, + 0,0,0,67,0,0,0,115,12,0,0,0,116,0,124,0, + 160,1,161,0,131,1,83,0,114,109,0,0,0,41,2,218, + 4,105,116,101,114,114,23,1,0,0,114,246,0,0,0,114, + 5,0,0,0,114,5,0,0,0,114,8,0,0,0,218,8, + 95,95,105,116,101,114,95,95,134,4,0,0,115,2,0,0, + 0,0,1,122,23,95,78,97,109,101,115,112,97,99,101,80, + 97,116,104,46,95,95,105,116,101,114,95,95,99,2,0,0, + 0,0,0,0,0,0,0,0,0,2,0,0,0,2,0,0, + 0,67,0,0,0,115,12,0,0,0,124,0,160,0,161,0, + 124,1,25,0,83,0,114,109,0,0,0,169,1,114,23,1, + 0,0,41,2,114,118,0,0,0,218,5,105,110,100,101,120, + 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, + 11,95,95,103,101,116,105,116,101,109,95,95,137,4,0,0, + 115,2,0,0,0,0,1,122,26,95,78,97,109,101,115,112, + 97,99,101,80,97,116,104,46,95,95,103,101,116,105,116,101, + 109,95,95,99,3,0,0,0,0,0,0,0,0,0,0,0, + 3,0,0,0,3,0,0,0,67,0,0,0,115,14,0,0, + 0,124,2,124,0,106,0,124,1,60,0,100,0,83,0,114, + 109,0,0,0,41,1,114,15,1,0,0,41,3,114,118,0, + 0,0,114,27,1,0,0,114,44,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,11,95,95,115, + 101,116,105,116,101,109,95,95,140,4,0,0,115,2,0,0, + 0,0,1,122,26,95,78,97,109,101,115,112,97,99,101,80, + 97,116,104,46,95,95,115,101,116,105,116,101,109,95,95,99, + 1,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0, + 3,0,0,0,67,0,0,0,115,12,0,0,0,116,0,124, + 0,160,1,161,0,131,1,83,0,114,109,0,0,0,41,2, + 114,23,0,0,0,114,23,1,0,0,114,246,0,0,0,114, + 5,0,0,0,114,5,0,0,0,114,8,0,0,0,218,7, + 95,95,108,101,110,95,95,143,4,0,0,115,2,0,0,0, + 0,1,122,22,95,78,97,109,101,115,112,97,99,101,80,97, + 116,104,46,95,95,108,101,110,95,95,99,1,0,0,0,0, + 0,0,0,0,0,0,0,1,0,0,0,3,0,0,0,67, + 0,0,0,115,12,0,0,0,100,1,160,0,124,0,106,1, + 161,1,83,0,41,2,78,122,20,95,78,97,109,101,115,112, + 97,99,101,80,97,116,104,40,123,33,114,125,41,41,2,114, + 62,0,0,0,114,15,1,0,0,114,246,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,8,95, + 95,114,101,112,114,95,95,146,4,0,0,115,2,0,0,0, + 0,1,122,23,95,78,97,109,101,115,112,97,99,101,80,97, + 116,104,46,95,95,114,101,112,114,95,95,99,2,0,0,0, + 0,0,0,0,0,0,0,0,2,0,0,0,3,0,0,0, + 67,0,0,0,115,12,0,0,0,124,1,124,0,160,0,161, + 0,118,0,83,0,114,109,0,0,0,114,26,1,0,0,169, + 2,114,118,0,0,0,218,4,105,116,101,109,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,218,12,95,95,99, + 111,110,116,97,105,110,115,95,95,149,4,0,0,115,2,0, + 0,0,0,1,122,27,95,78,97,109,101,115,112,97,99,101, + 80,97,116,104,46,95,95,99,111,110,116,97,105,110,115,95, + 95,99,2,0,0,0,0,0,0,0,0,0,0,0,2,0, + 0,0,3,0,0,0,67,0,0,0,115,16,0,0,0,124, + 0,106,0,160,1,124,1,161,1,1,0,100,0,83,0,114, + 109,0,0,0,41,2,114,15,1,0,0,114,186,0,0,0, + 114,32,1,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,114,186,0,0,0,152,4,0,0,115,2,0, + 0,0,0,1,122,21,95,78,97,109,101,115,112,97,99,101, + 
80,97,116,104,46,97,112,112,101,110,100,78,41,15,114,125, + 0,0,0,114,124,0,0,0,114,126,0,0,0,114,127,0, + 0,0,114,209,0,0,0,114,21,1,0,0,114,16,1,0, + 0,114,23,1,0,0,114,25,1,0,0,114,28,1,0,0, + 114,29,1,0,0,114,30,1,0,0,114,31,1,0,0,114, + 34,1,0,0,114,186,0,0,0,114,5,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,114,13,1, + 0,0,94,4,0,0,115,24,0,0,0,8,1,4,6,8, + 6,8,10,8,4,8,13,8,3,8,3,8,3,8,3,8, + 3,8,3,114,13,1,0,0,99,0,0,0,0,0,0,0, + 0,0,0,0,0,0,0,0,0,3,0,0,0,64,0,0, + 0,115,80,0,0,0,101,0,90,1,100,0,90,2,100,1, + 100,2,132,0,90,3,101,4,100,3,100,4,132,0,131,1, + 90,5,100,5,100,6,132,0,90,6,100,7,100,8,132,0, + 90,7,100,9,100,10,132,0,90,8,100,11,100,12,132,0, + 90,9,100,13,100,14,132,0,90,10,100,15,100,16,132,0, + 90,11,100,17,83,0,41,18,218,16,95,78,97,109,101,115, + 112,97,99,101,76,111,97,100,101,114,99,4,0,0,0,0, + 0,0,0,0,0,0,0,4,0,0,0,4,0,0,0,67, + 0,0,0,115,18,0,0,0,116,0,124,1,124,2,124,3, + 131,3,124,0,95,1,100,0,83,0,114,109,0,0,0,41, + 2,114,13,1,0,0,114,15,1,0,0,114,19,1,0,0, 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, - 38,1,0,0,203,4,0,0,115,10,0,0,0,0,4,22, - 1,8,1,10,1,10,1,122,28,80,97,116,104,70,105,110, - 100,101,114,46,105,110,118,97,108,105,100,97,116,101,95,99, - 97,99,104,101,115,99,2,0,0,0,0,0,0,0,0,0, - 0,0,3,0,0,0,9,0,0,0,67,0,0,0,115,82, - 0,0,0,116,0,106,1,100,1,117,1,114,28,116,0,106, - 1,115,28,116,2,160,3,100,2,116,4,161,2,1,0,116, - 0,106,1,68,0,93,42,125,2,122,14,124,2,124,1,131, - 1,87,0,2,0,1,0,83,0,4,0,116,5,121,74,1, - 0,1,0,1,0,89,0,113,34,89,0,113,34,48,0,113, - 34,100,1,83,0,41,3,122,46,83,101,97,114,99,104,32, - 115,121,115,46,112,97,116,104,95,104,111,111,107,115,32,102, - 111,114,32,97,32,102,105,110,100,101,114,32,102,111,114,32, - 39,112,97,116,104,39,46,78,122,23,115,121,115,46,112,97, - 116,104,95,104,111,111,107,115,32,105,115,32,101,109,112,116, - 121,41,6,114,1,0,0,0,218,10,112,97,116,104,95,104, - 111,111,107,115,114,75,0,0,0,114,76,0,0,0,114,138, - 0,0,0,114,117,0,0,0,41,3,114,193,0,0,0,114, - 44,0,0,0,90,4,104,111,111,107,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,218,11,95,112,97,116,104, - 95,104,111,111,107,115,213,4,0,0,115,16,0,0,0,0, - 3,16,1,12,1,10,1,2,1,14,1,12,1,12,2,122, - 22,80,97,116,104,70,105,110,100,101,114,46,95,112,97,116, - 104,95,104,111,111,107,115,99,2,0,0,0,0,0,0,0, - 0,0,0,0,3,0,0,0,8,0,0,0,67,0,0,0, - 115,100,0,0,0,124,1,100,1,107,2,114,42,122,12,116, - 0,160,1,161,0,125,1,87,0,110,20,4,0,116,2,121, - 40,1,0,1,0,1,0,89,0,100,2,83,0,48,0,122, - 14,116,3,106,4,124,1,25,0,125,2,87,0,110,38,4, - 0,116,5,121,94,1,0,1,0,1,0,124,0,160,6,124, - 1,161,1,125,2,124,2,116,3,106,4,124,1,60,0,89, - 0,110,2,48,0,124,2,83,0,41,3,122,210,71,101,116, - 32,116,104,101,32,102,105,110,100,101,114,32,102,111,114,32, - 116,104,101,32,112,97,116,104,32,101,110,116,114,121,32,102, - 114,111,109,32,115,121,115,46,112,97,116,104,95,105,109,112, - 111,114,116,101,114,95,99,97,99,104,101,46,10,10,32,32, - 32,32,32,32,32,32,73,102,32,116,104,101,32,112,97,116, - 104,32,101,110,116,114,121,32,105,115,32,110,111,116,32,105, - 110,32,116,104,101,32,99,97,99,104,101,44,32,102,105,110, - 100,32,116,104,101,32,97,112,112,114,111,112,114,105,97,116, - 101,32,102,105,110,100,101,114,10,32,32,32,32,32,32,32, - 32,97,110,100,32,99,97,99,104,101,32,105,116,46,32,73, - 102,32,110,111,32,102,105,110,100,101,114,32,105,115,32,97, - 118,97,105,108,97,98,108,101,44,32,115,116,111,114,101,32, - 78,111,110,101,46,10,10,32,32,32,32,32,32,32,32,114, - 40,0,0,0,78,41,7,114,4,0,0,0,114,55,0,0, - 0,218,17,70,105,108,101,78,111,116,70,111,117,110,100,69, - 
114,114,111,114,114,1,0,0,0,114,40,1,0,0,218,8, - 75,101,121,69,114,114,111,114,114,44,1,0,0,41,3,114, - 193,0,0,0,114,44,0,0,0,114,42,1,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,20,95, + 209,0,0,0,158,4,0,0,115,2,0,0,0,0,1,122, + 25,95,78,97,109,101,115,112,97,99,101,76,111,97,100,101, + 114,46,95,95,105,110,105,116,95,95,99,2,0,0,0,0, + 0,0,0,0,0,0,0,2,0,0,0,3,0,0,0,67, + 0,0,0,115,12,0,0,0,100,1,160,0,124,1,106,1, + 161,1,83,0,41,2,122,115,82,101,116,117,114,110,32,114, + 101,112,114,32,102,111,114,32,116,104,101,32,109,111,100,117, + 108,101,46,10,10,32,32,32,32,32,32,32,32,84,104,101, + 32,109,101,116,104,111,100,32,105,115,32,100,101,112,114,101, + 99,97,116,101,100,46,32,32,84,104,101,32,105,109,112,111, + 114,116,32,109,97,99,104,105,110,101,114,121,32,100,111,101, + 115,32,116,104,101,32,106,111,98,32,105,116,115,101,108,102, + 46,10,10,32,32,32,32,32,32,32,32,122,25,60,109,111, + 100,117,108,101,32,123,33,114,125,32,40,110,97,109,101,115, + 112,97,99,101,41,62,41,2,114,62,0,0,0,114,125,0, + 0,0,41,2,114,193,0,0,0,114,216,0,0,0,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,218,11,109, + 111,100,117,108,101,95,114,101,112,114,161,4,0,0,115,2, + 0,0,0,0,7,122,28,95,78,97,109,101,115,112,97,99, + 101,76,111,97,100,101,114,46,109,111,100,117,108,101,95,114, + 101,112,114,99,2,0,0,0,0,0,0,0,0,0,0,0, + 2,0,0,0,1,0,0,0,67,0,0,0,115,4,0,0, + 0,100,1,83,0,41,2,78,84,114,5,0,0,0,114,219, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,182,0,0,0,170,4,0,0,115,2,0,0,0, + 0,1,122,27,95,78,97,109,101,115,112,97,99,101,76,111, + 97,100,101,114,46,105,115,95,112,97,99,107,97,103,101,99, + 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, + 1,0,0,0,67,0,0,0,115,4,0,0,0,100,1,83, + 0,41,2,78,114,40,0,0,0,114,5,0,0,0,114,219, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,229,0,0,0,173,4,0,0,115,2,0,0,0, + 0,1,122,27,95,78,97,109,101,115,112,97,99,101,76,111, + 97,100,101,114,46,103,101,116,95,115,111,117,114,99,101,99, + 2,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, + 6,0,0,0,67,0,0,0,115,16,0,0,0,116,0,100, + 1,100,2,100,3,100,4,100,5,141,4,83,0,41,6,78, + 114,40,0,0,0,122,8,60,115,116,114,105,110,103,62,114, + 215,0,0,0,84,41,1,114,231,0,0,0,41,1,114,232, + 0,0,0,114,219,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,213,0,0,0,176,4,0,0, + 115,2,0,0,0,0,1,122,25,95,78,97,109,101,115,112, + 97,99,101,76,111,97,100,101,114,46,103,101,116,95,99,111, + 100,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, + 100,1,83,0,114,210,0,0,0,114,5,0,0,0,114,211, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,212,0,0,0,179,4,0,0,115,2,0,0,0, + 0,1,122,30,95,78,97,109,101,115,112,97,99,101,76,111, + 97,100,101,114,46,99,114,101,97,116,101,95,109,111,100,117, + 108,101,99,2,0,0,0,0,0,0,0,0,0,0,0,2, + 0,0,0,1,0,0,0,67,0,0,0,115,4,0,0,0, + 100,0,83,0,114,109,0,0,0,114,5,0,0,0,114,6, + 1,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,217,0,0,0,182,4,0,0,115,2,0,0,0, + 0,1,122,28,95,78,97,109,101,115,112,97,99,101,76,111, + 97,100,101,114,46,101,120,101,99,95,109,111,100,117,108,101, + 99,2,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,4,0,0,0,67,0,0,0,115,26,0,0,0,116,0, + 160,1,100,1,124,0,106,2,161,2,1,0,116,0,160,3, + 124,0,124,1,161,2,83,0,41,2,122,98,76,111,97,100, + 32,97,32,110,97,109,101,115,112,97,99,101,32,109,111,100, + 117,108,101,46,10,10,32,32,32,32,32,32,32,32,84,104, + 105,115,32,109,101,116,104,111,100,32,105,115,32,100,101,112, + 114,101,99,97,116,101,100,46,32,32,85,115,101,32,101,120, + 101,99,95,109,111,100,117,108,101,40,41,32,105,110,115,116, + 101,97,100,46,10,10,32,32,32,32,32,32,32,32,122,38, + 
110,97,109,101,115,112,97,99,101,32,109,111,100,117,108,101, + 32,108,111,97,100,101,100,32,119,105,116,104,32,112,97,116, + 104,32,123,33,114,125,41,4,114,134,0,0,0,114,149,0, + 0,0,114,15,1,0,0,114,218,0,0,0,114,219,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,220,0,0,0,185,4,0,0,115,8,0,0,0,0,7, + 6,1,4,255,4,2,122,28,95,78,97,109,101,115,112,97, + 99,101,76,111,97,100,101,114,46,108,111,97,100,95,109,111, + 100,117,108,101,78,41,12,114,125,0,0,0,114,124,0,0, + 0,114,126,0,0,0,114,209,0,0,0,114,207,0,0,0, + 114,36,1,0,0,114,182,0,0,0,114,229,0,0,0,114, + 213,0,0,0,114,212,0,0,0,114,217,0,0,0,114,220, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,35,1,0,0,157,4,0,0, + 115,18,0,0,0,8,1,8,3,2,1,10,8,8,3,8, + 3,8,3,8,3,8,3,114,35,1,0,0,99,0,0,0, + 0,0,0,0,0,0,0,0,0,0,0,0,0,4,0,0, + 0,64,0,0,0,115,118,0,0,0,101,0,90,1,100,0, + 90,2,100,1,90,3,101,4,100,2,100,3,132,0,131,1, + 90,5,101,4,100,4,100,5,132,0,131,1,90,6,101,4, + 100,6,100,7,132,0,131,1,90,7,101,4,100,8,100,9, + 132,0,131,1,90,8,101,4,100,19,100,11,100,12,132,1, + 131,1,90,9,101,4,100,20,100,13,100,14,132,1,131,1, + 90,10,101,4,100,21,100,15,100,16,132,1,131,1,90,11, + 101,4,100,17,100,18,132,0,131,1,90,12,100,10,83,0, + 41,22,218,10,80,97,116,104,70,105,110,100,101,114,122,62, + 77,101,116,97,32,112,97,116,104,32,102,105,110,100,101,114, + 32,102,111,114,32,115,121,115,46,112,97,116,104,32,97,110, + 100,32,112,97,99,107,97,103,101,32,95,95,112,97,116,104, + 95,95,32,97,116,116,114,105,98,117,116,101,115,46,99,1, + 0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,4, + 0,0,0,67,0,0,0,115,64,0,0,0,116,0,116,1, + 106,2,160,3,161,0,131,1,68,0,93,44,92,2,125,1, + 125,2,124,2,100,1,117,0,114,40,116,1,106,2,124,1, + 61,0,113,14,116,4,124,2,100,2,131,2,114,14,124,2, + 160,5,161,0,1,0,113,14,100,1,83,0,41,3,122,125, + 67,97,108,108,32,116,104,101,32,105,110,118,97,108,105,100, + 97,116,101,95,99,97,99,104,101,115,40,41,32,109,101,116, + 104,111,100,32,111,110,32,97,108,108,32,112,97,116,104,32, + 101,110,116,114,121,32,102,105,110,100,101,114,115,10,32,32, + 32,32,32,32,32,32,115,116,111,114,101,100,32,105,110,32, + 115,121,115,46,112,97,116,104,95,105,109,112,111,114,116,101, + 114,95,99,97,99,104,101,115,32,40,119,104,101,114,101,32, + 105,109,112,108,101,109,101,110,116,101,100,41,46,78,218,17, + 105,110,118,97,108,105,100,97,116,101,95,99,97,99,104,101, + 115,41,6,218,4,108,105,115,116,114,1,0,0,0,218,19, 112,97,116,104,95,105,109,112,111,114,116,101,114,95,99,97, - 99,104,101,226,4,0,0,115,22,0,0,0,0,8,8,1, - 2,1,12,1,12,3,8,1,2,1,14,1,12,1,10,1, - 16,1,122,31,80,97,116,104,70,105,110,100,101,114,46,95, + 99,104,101,218,5,105,116,101,109,115,114,128,0,0,0,114, + 38,1,0,0,41,3,114,193,0,0,0,114,116,0,0,0, + 218,6,102,105,110,100,101,114,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,114,38,1,0,0,203,4,0,0, + 115,10,0,0,0,0,4,22,1,8,1,10,1,10,1,122, + 28,80,97,116,104,70,105,110,100,101,114,46,105,110,118,97, + 108,105,100,97,116,101,95,99,97,99,104,101,115,99,2,0, + 0,0,0,0,0,0,0,0,0,0,3,0,0,0,9,0, + 0,0,67,0,0,0,115,82,0,0,0,116,0,106,1,100, + 1,117,1,114,28,116,0,106,1,115,28,116,2,160,3,100, + 2,116,4,161,2,1,0,116,0,106,1,68,0,93,42,125, + 2,122,14,124,2,124,1,131,1,87,0,2,0,1,0,83, + 0,4,0,116,5,121,74,1,0,1,0,1,0,89,0,113, + 34,89,0,113,34,48,0,113,34,100,1,83,0,41,3,122, + 46,83,101,97,114,99,104,32,115,121,115,46,112,97,116,104, + 95,104,111,111,107,115,32,102,111,114,32,97,32,102,105,110, + 100,101,114,32,102,111,114,32,39,112,97,116,104,39,46,78, + 122,23,115,121,115,46,112,97,116,104,95,104,111,111,107,115, + 
32,105,115,32,101,109,112,116,121,41,6,114,1,0,0,0, + 218,10,112,97,116,104,95,104,111,111,107,115,114,75,0,0, + 0,114,76,0,0,0,114,138,0,0,0,114,117,0,0,0, + 41,3,114,193,0,0,0,114,44,0,0,0,90,4,104,111, + 111,107,114,5,0,0,0,114,5,0,0,0,114,8,0,0, + 0,218,11,95,112,97,116,104,95,104,111,111,107,115,213,4, + 0,0,115,16,0,0,0,0,3,16,1,12,1,10,1,2, + 1,14,1,12,1,12,2,122,22,80,97,116,104,70,105,110, + 100,101,114,46,95,112,97,116,104,95,104,111,111,107,115,99, + 2,0,0,0,0,0,0,0,0,0,0,0,3,0,0,0, + 8,0,0,0,67,0,0,0,115,100,0,0,0,124,1,100, + 1,107,2,114,42,122,12,116,0,160,1,161,0,125,1,87, + 0,110,20,4,0,116,2,121,40,1,0,1,0,1,0,89, + 0,100,2,83,0,48,0,122,14,116,3,106,4,124,1,25, + 0,125,2,87,0,110,38,4,0,116,5,121,94,1,0,1, + 0,1,0,124,0,160,6,124,1,161,1,125,2,124,2,116, + 3,106,4,124,1,60,0,89,0,110,2,48,0,124,2,83, + 0,41,3,122,210,71,101,116,32,116,104,101,32,102,105,110, + 100,101,114,32,102,111,114,32,116,104,101,32,112,97,116,104, + 32,101,110,116,114,121,32,102,114,111,109,32,115,121,115,46, 112,97,116,104,95,105,109,112,111,114,116,101,114,95,99,97, - 99,104,101,99,3,0,0,0,0,0,0,0,0,0,0,0, - 6,0,0,0,4,0,0,0,67,0,0,0,115,82,0,0, - 0,116,0,124,2,100,1,131,2,114,26,124,2,160,1,124, - 1,161,1,92,2,125,3,125,4,110,14,124,2,160,2,124, - 1,161,1,125,3,103,0,125,4,124,3,100,0,117,1,114, - 60,116,3,160,4,124,1,124,3,161,2,83,0,116,3,160, - 5,124,1,100,0,161,2,125,5,124,4,124,5,95,6,124, - 5,83,0,41,2,78,114,137,0,0,0,41,7,114,128,0, - 0,0,114,137,0,0,0,114,206,0,0,0,114,134,0,0, - 0,114,201,0,0,0,114,183,0,0,0,114,178,0,0,0, - 41,6,114,193,0,0,0,114,139,0,0,0,114,42,1,0, - 0,114,140,0,0,0,114,141,0,0,0,114,187,0,0,0, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, - 16,95,108,101,103,97,99,121,95,103,101,116,95,115,112,101, - 99,248,4,0,0,115,18,0,0,0,0,4,10,1,16,2, - 10,1,4,1,8,1,12,1,12,1,6,1,122,27,80,97, - 116,104,70,105,110,100,101,114,46,95,108,101,103,97,99,121, - 95,103,101,116,95,115,112,101,99,78,99,4,0,0,0,0, - 0,0,0,0,0,0,0,9,0,0,0,5,0,0,0,67, - 0,0,0,115,166,0,0,0,103,0,125,4,124,2,68,0, - 93,134,125,5,116,0,124,5,116,1,116,2,102,2,131,2, - 115,28,113,8,124,0,160,3,124,5,161,1,125,6,124,6, - 100,1,117,1,114,8,116,4,124,6,100,2,131,2,114,70, - 124,6,160,5,124,1,124,3,161,2,125,7,110,12,124,0, - 160,6,124,1,124,6,161,2,125,7,124,7,100,1,117,0, - 114,92,113,8,124,7,106,7,100,1,117,1,114,110,124,7, - 2,0,1,0,83,0,124,7,106,8,125,8,124,8,100,1, - 117,0,114,132,116,9,100,3,131,1,130,1,124,4,160,10, - 124,8,161,1,1,0,113,8,116,11,160,12,124,1,100,1, - 161,2,125,7,124,4,124,7,95,8,124,7,83,0,41,4, - 122,63,70,105,110,100,32,116,104,101,32,108,111,97,100,101, - 114,32,111,114,32,110,97,109,101,115,112,97,99,101,95,112, - 97,116,104,32,102,111,114,32,116,104,105,115,32,109,111,100, - 117,108,101,47,112,97,99,107,97,103,101,32,110,97,109,101, - 46,78,114,203,0,0,0,122,19,115,112,101,99,32,109,105, - 115,115,105,110,103,32,108,111,97,100,101,114,41,13,114,161, - 0,0,0,114,84,0,0,0,218,5,98,121,116,101,115,114, - 47,1,0,0,114,128,0,0,0,114,203,0,0,0,114,48, - 1,0,0,114,140,0,0,0,114,178,0,0,0,114,117,0, - 0,0,114,167,0,0,0,114,134,0,0,0,114,183,0,0, - 0,41,9,114,193,0,0,0,114,139,0,0,0,114,44,0, - 0,0,114,202,0,0,0,218,14,110,97,109,101,115,112,97, - 99,101,95,112,97,116,104,90,5,101,110,116,114,121,114,42, - 1,0,0,114,187,0,0,0,114,141,0,0,0,114,5,0, - 0,0,114,5,0,0,0,114,8,0,0,0,218,9,95,103, - 101,116,95,115,112,101,99,7,5,0,0,115,40,0,0,0, - 0,5,4,1,8,1,14,1,2,1,10,1,8,1,10,1, - 14,2,12,1,8,1,2,1,10,1,8,1,6,1,8,1, - 8,5,12,2,12,1,6,1,122,20,80,97,116,104,70,105, - 
110,100,101,114,46,95,103,101,116,95,115,112,101,99,99,4, - 0,0,0,0,0,0,0,0,0,0,0,6,0,0,0,5, - 0,0,0,67,0,0,0,115,100,0,0,0,124,2,100,1, - 117,0,114,14,116,0,106,1,125,2,124,0,160,2,124,1, - 124,2,124,3,161,3,125,4,124,4,100,1,117,0,114,40, - 100,1,83,0,124,4,106,3,100,1,117,0,114,92,124,4, - 106,4,125,5,124,5,114,86,100,1,124,4,95,5,116,6, - 124,1,124,5,124,0,106,2,131,3,124,4,95,4,124,4, - 83,0,100,1,83,0,110,4,124,4,83,0,100,1,83,0, - 41,2,122,141,84,114,121,32,116,111,32,102,105,110,100,32, - 97,32,115,112,101,99,32,102,111,114,32,39,102,117,108,108, - 110,97,109,101,39,32,111,110,32,115,121,115,46,112,97,116, - 104,32,111,114,32,39,112,97,116,104,39,46,10,10,32,32, - 32,32,32,32,32,32,84,104,101,32,115,101,97,114,99,104, - 32,105,115,32,98,97,115,101,100,32,111,110,32,115,121,115, - 46,112,97,116,104,95,104,111,111,107,115,32,97,110,100,32, - 115,121,115,46,112,97,116,104,95,105,109,112,111,114,116,101, - 114,95,99,97,99,104,101,46,10,32,32,32,32,32,32,32, - 32,78,41,7,114,1,0,0,0,114,44,0,0,0,114,51, - 1,0,0,114,140,0,0,0,114,178,0,0,0,114,181,0, - 0,0,114,13,1,0,0,41,6,114,193,0,0,0,114,139, - 0,0,0,114,44,0,0,0,114,202,0,0,0,114,187,0, - 0,0,114,50,1,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,114,203,0,0,0,39,5,0,0,115, - 26,0,0,0,0,6,8,1,6,1,14,1,8,1,4,1, - 10,1,6,1,4,3,6,1,16,1,4,2,6,2,122,20, - 80,97,116,104,70,105,110,100,101,114,46,102,105,110,100,95, - 115,112,101,99,99,3,0,0,0,0,0,0,0,0,0,0, - 0,4,0,0,0,4,0,0,0,67,0,0,0,115,30,0, - 0,0,124,0,160,0,124,1,124,2,161,2,125,3,124,3, - 100,1,117,0,114,24,100,1,83,0,124,3,106,1,83,0, - 41,2,122,170,102,105,110,100,32,116,104,101,32,109,111,100, - 117,108,101,32,111,110,32,115,121,115,46,112,97,116,104,32, - 111,114,32,39,112,97,116,104,39,32,98,97,115,101,100,32, - 111,110,32,115,121,115,46,112,97,116,104,95,104,111,111,107, - 115,32,97,110,100,10,32,32,32,32,32,32,32,32,115,121, - 115,46,112,97,116,104,95,105,109,112,111,114,116,101,114,95, - 99,97,99,104,101,46,10,10,32,32,32,32,32,32,32,32, - 84,104,105,115,32,109,101,116,104,111,100,32,105,115,32,100, - 101,112,114,101,99,97,116,101,100,46,32,32,85,115,101,32, - 102,105,110,100,95,115,112,101,99,40,41,32,105,110,115,116, - 101,97,100,46,10,10,32,32,32,32,32,32,32,32,78,114, - 204,0,0,0,114,205,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,206,0,0,0,63,5,0, - 0,115,8,0,0,0,0,8,12,1,8,1,4,1,122,22, - 80,97,116,104,70,105,110,100,101,114,46,102,105,110,100,95, - 109,111,100,117,108,101,99,1,0,0,0,0,0,0,0,0, - 0,0,0,4,0,0,0,4,0,0,0,79,0,0,0,115, - 28,0,0,0,100,1,100,2,108,0,109,1,125,3,1,0, - 124,3,106,2,124,1,105,0,124,2,164,1,142,1,83,0, - 41,3,97,32,1,0,0,10,32,32,32,32,32,32,32,32, - 70,105,110,100,32,100,105,115,116,114,105,98,117,116,105,111, - 110,115,46,10,10,32,32,32,32,32,32,32,32,82,101,116, - 117,114,110,32,97,110,32,105,116,101,114,97,98,108,101,32, - 111,102,32,97,108,108,32,68,105,115,116,114,105,98,117,116, - 105,111,110,32,105,110,115,116,97,110,99,101,115,32,99,97, - 112,97,98,108,101,32,111,102,10,32,32,32,32,32,32,32, - 32,108,111,97,100,105,110,103,32,116,104,101,32,109,101,116, - 97,100,97,116,97,32,102,111,114,32,112,97,99,107,97,103, - 101,115,32,109,97,116,99,104,105,110,103,32,96,96,99,111, - 110,116,101,120,116,46,110,97,109,101,96,96,10,32,32,32, - 32,32,32,32,32,40,111,114,32,97,108,108,32,110,97,109, - 101,115,32,105,102,32,96,96,78,111,110,101,96,96,32,105, - 110,100,105,99,97,116,101,100,41,32,97,108,111,110,103,32, - 116,104,101,32,112,97,116,104,115,32,105,110,32,116,104,101, - 32,108,105,115,116,10,32,32,32,32,32,32,32,32,111,102, - 
32,100,105,114,101,99,116,111,114,105,101,115,32,96,96,99, - 111,110,116,101,120,116,46,112,97,116,104,96,96,46,10,32, - 32,32,32,32,32,32,32,114,73,0,0,0,41,1,218,18, - 77,101,116,97,100,97,116,97,80,97,116,104,70,105,110,100, - 101,114,41,3,90,18,105,109,112,111,114,116,108,105,98,46, - 109,101,116,97,100,97,116,97,114,52,1,0,0,218,18,102, - 105,110,100,95,100,105,115,116,114,105,98,117,116,105,111,110, - 115,41,4,114,193,0,0,0,114,119,0,0,0,114,120,0, - 0,0,114,52,1,0,0,114,5,0,0,0,114,5,0,0, - 0,114,8,0,0,0,114,53,1,0,0,76,5,0,0,115, - 4,0,0,0,0,10,12,1,122,29,80,97,116,104,70,105, - 110,100,101,114,46,102,105,110,100,95,100,105,115,116,114,105, - 98,117,116,105,111,110,115,41,1,78,41,2,78,78,41,1, - 78,41,13,114,125,0,0,0,114,124,0,0,0,114,126,0, - 0,0,114,127,0,0,0,114,207,0,0,0,114,38,1,0, - 0,114,44,1,0,0,114,47,1,0,0,114,48,1,0,0, - 114,51,1,0,0,114,203,0,0,0,114,206,0,0,0,114, - 53,1,0,0,114,5,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,37,1,0,0,199,4,0, - 0,115,34,0,0,0,8,2,4,2,2,1,10,9,2,1, - 10,12,2,1,10,21,2,1,10,14,2,1,12,31,2,1, - 12,23,2,1,12,12,2,1,114,37,1,0,0,99,0,0, - 0,0,0,0,0,0,0,0,0,0,0,0,0,0,3,0, - 0,0,64,0,0,0,115,90,0,0,0,101,0,90,1,100, - 0,90,2,100,1,90,3,100,2,100,3,132,0,90,4,100, - 4,100,5,132,0,90,5,101,6,90,7,100,6,100,7,132, - 0,90,8,100,8,100,9,132,0,90,9,100,19,100,11,100, - 12,132,1,90,10,100,13,100,14,132,0,90,11,101,12,100, - 15,100,16,132,0,131,1,90,13,100,17,100,18,132,0,90, - 14,100,10,83,0,41,20,218,10,70,105,108,101,70,105,110, - 100,101,114,122,172,70,105,108,101,45,98,97,115,101,100,32, - 102,105,110,100,101,114,46,10,10,32,32,32,32,73,110,116, - 101,114,97,99,116,105,111,110,115,32,119,105,116,104,32,116, - 104,101,32,102,105,108,101,32,115,121,115,116,101,109,32,97, - 114,101,32,99,97,99,104,101,100,32,102,111,114,32,112,101, - 114,102,111,114,109,97,110,99,101,44,32,98,101,105,110,103, - 10,32,32,32,32,114,101,102,114,101,115,104,101,100,32,119, - 104,101,110,32,116,104,101,32,100,105,114,101,99,116,111,114, - 121,32,116,104,101,32,102,105,110,100,101,114,32,105,115,32, - 104,97,110,100,108,105,110,103,32,104,97,115,32,98,101,101, - 110,32,109,111,100,105,102,105,101,100,46,10,10,32,32,32, - 32,99,2,0,0,0,0,0,0,0,0,0,0,0,5,0, - 0,0,6,0,0,0,7,0,0,0,115,84,0,0,0,103, - 0,125,3,124,2,68,0,93,32,92,2,137,0,125,4,124, - 3,160,0,135,0,102,1,100,1,100,2,132,8,124,4,68, - 0,131,1,161,1,1,0,113,8,124,3,124,0,95,1,124, - 1,112,54,100,3,124,0,95,2,100,4,124,0,95,3,116, - 4,131,0,124,0,95,5,116,4,131,0,124,0,95,6,100, - 5,83,0,41,6,122,154,73,110,105,116,105,97,108,105,122, - 101,32,119,105,116,104,32,116,104,101,32,112,97,116,104,32, - 116,111,32,115,101,97,114,99,104,32,111,110,32,97,110,100, - 32,97,32,118,97,114,105,97,98,108,101,32,110,117,109,98, - 101,114,32,111,102,10,32,32,32,32,32,32,32,32,50,45, - 116,117,112,108,101,115,32,99,111,110,116,97,105,110,105,110, - 103,32,116,104,101,32,108,111,97,100,101,114,32,97,110,100, - 32,116,104,101,32,102,105,108,101,32,115,117,102,102,105,120, - 101,115,32,116,104,101,32,108,111,97,100,101,114,10,32,32, - 32,32,32,32,32,32,114,101,99,111,103,110,105,122,101,115, - 46,99,1,0,0,0,0,0,0,0,0,0,0,0,2,0, - 0,0,3,0,0,0,51,0,0,0,115,22,0,0,0,124, - 0,93,14,125,1,124,1,136,0,102,2,86,0,1,0,113, - 2,100,0,83,0,114,109,0,0,0,114,5,0,0,0,114, - 7,1,0,0,169,1,114,140,0,0,0,114,5,0,0,0, - 114,8,0,0,0,114,10,1,0,0,105,5,0,0,243,0, - 0,0,0,122,38,70,105,108,101,70,105,110,100,101,114,46, - 95,95,105,110,105,116,95,95,46,60,108,111,99,97,108,115, - 62,46,60,103,101,110,101,120,112,114,62,114,71,0,0,0, - 
114,104,0,0,0,78,41,7,114,167,0,0,0,218,8,95, - 108,111,97,100,101,114,115,114,44,0,0,0,218,11,95,112, - 97,116,104,95,109,116,105,109,101,218,3,115,101,116,218,11, - 95,112,97,116,104,95,99,97,99,104,101,218,19,95,114,101, - 108,97,120,101,100,95,112,97,116,104,95,99,97,99,104,101, - 41,5,114,118,0,0,0,114,44,0,0,0,218,14,108,111, - 97,100,101,114,95,100,101,116,97,105,108,115,90,7,108,111, - 97,100,101,114,115,114,189,0,0,0,114,5,0,0,0,114, - 55,1,0,0,114,8,0,0,0,114,209,0,0,0,99,5, - 0,0,115,16,0,0,0,0,4,4,1,12,1,26,1,6, - 2,10,1,6,1,8,1,122,19,70,105,108,101,70,105,110, - 100,101,114,46,95,95,105,110,105,116,95,95,99,1,0,0, - 0,0,0,0,0,0,0,0,0,1,0,0,0,2,0,0, - 0,67,0,0,0,115,10,0,0,0,100,1,124,0,95,0, - 100,2,83,0,41,3,122,31,73,110,118,97,108,105,100,97, - 116,101,32,116,104,101,32,100,105,114,101,99,116,111,114,121, - 32,109,116,105,109,101,46,114,104,0,0,0,78,41,1,114, - 58,1,0,0,114,246,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,38,1,0,0,113,5,0, - 0,115,2,0,0,0,0,2,122,28,70,105,108,101,70,105, - 110,100,101,114,46,105,110,118,97,108,105,100,97,116,101,95, - 99,97,99,104,101,115,99,2,0,0,0,0,0,0,0,0, - 0,0,0,3,0,0,0,3,0,0,0,67,0,0,0,115, - 42,0,0,0,124,0,160,0,124,1,161,1,125,2,124,2, - 100,1,117,0,114,26,100,1,103,0,102,2,83,0,124,2, - 106,1,124,2,106,2,112,38,103,0,102,2,83,0,41,2, - 122,197,84,114,121,32,116,111,32,102,105,110,100,32,97,32, - 108,111,97,100,101,114,32,102,111,114,32,116,104,101,32,115, - 112,101,99,105,102,105,101,100,32,109,111,100,117,108,101,44, - 32,111,114,32,116,104,101,32,110,97,109,101,115,112,97,99, - 101,10,32,32,32,32,32,32,32,32,112,97,99,107,97,103, - 101,32,112,111,114,116,105,111,110,115,46,32,82,101,116,117, - 114,110,115,32,40,108,111,97,100,101,114,44,32,108,105,115, - 116,45,111,102,45,112,111,114,116,105,111,110,115,41,46,10, - 10,32,32,32,32,32,32,32,32,84,104,105,115,32,109,101, - 116,104,111,100,32,105,115,32,100,101,112,114,101,99,97,116, - 101,100,46,32,32,85,115,101,32,102,105,110,100,95,115,112, - 101,99,40,41,32,105,110,115,116,101,97,100,46,10,10,32, - 32,32,32,32,32,32,32,78,41,3,114,203,0,0,0,114, - 140,0,0,0,114,178,0,0,0,41,3,114,118,0,0,0, - 114,139,0,0,0,114,187,0,0,0,114,5,0,0,0,114, - 5,0,0,0,114,8,0,0,0,114,137,0,0,0,119,5, - 0,0,115,8,0,0,0,0,7,10,1,8,1,8,1,122, - 22,70,105,108,101,70,105,110,100,101,114,46,102,105,110,100, - 95,108,111,97,100,101,114,99,6,0,0,0,0,0,0,0, - 0,0,0,0,7,0,0,0,6,0,0,0,67,0,0,0, - 115,26,0,0,0,124,1,124,2,124,3,131,2,125,6,116, - 0,124,2,124,3,124,6,124,4,100,1,141,4,83,0,41, - 2,78,114,177,0,0,0,41,1,114,190,0,0,0,41,7, - 114,118,0,0,0,114,188,0,0,0,114,139,0,0,0,114, - 44,0,0,0,90,4,115,109,115,108,114,202,0,0,0,114, - 140,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,114,51,1,0,0,131,5,0,0,115,8,0,0, - 0,0,1,10,1,8,1,2,255,122,20,70,105,108,101,70, - 105,110,100,101,114,46,95,103,101,116,95,115,112,101,99,78, - 99,3,0,0,0,0,0,0,0,0,0,0,0,14,0,0, - 0,8,0,0,0,67,0,0,0,115,96,1,0,0,100,1, - 125,3,124,1,160,0,100,2,161,1,100,3,25,0,125,4, - 122,24,116,1,124,0,106,2,112,34,116,3,160,4,161,0, - 131,1,106,5,125,5,87,0,110,22,4,0,116,6,121,64, - 1,0,1,0,1,0,100,4,125,5,89,0,110,2,48,0, - 124,5,124,0,106,7,107,3,114,90,124,0,160,8,161,0, - 1,0,124,5,124,0,95,7,116,9,131,0,114,112,124,0, - 106,10,125,6,124,4,160,11,161,0,125,7,110,10,124,0, - 106,12,125,6,124,4,125,7,124,7,124,6,118,0,114,216, - 116,13,124,0,106,2,124,4,131,2,125,8,124,0,106,14, - 68,0,93,58,92,2,125,9,125,10,100,5,124,9,23,0, - 125,11,116,13,124,8,124,11,131,2,125,12,116,15,124,12, - 
131,1,114,148,124,0,160,16,124,10,124,1,124,12,124,8, - 103,1,124,2,161,5,2,0,1,0,83,0,113,148,116,17, - 124,8,131,1,125,3,124,0,106,14,68,0,93,82,92,2, - 125,9,125,10,116,13,124,0,106,2,124,4,124,9,23,0, - 131,2,125,12,116,18,106,19,100,6,124,12,100,3,100,7, - 141,3,1,0,124,7,124,9,23,0,124,6,118,0,114,222, - 116,15,124,12,131,1,114,222,124,0,160,16,124,10,124,1, - 124,12,100,8,124,2,161,5,2,0,1,0,83,0,113,222, - 124,3,144,1,114,92,116,18,160,19,100,9,124,8,161,2, - 1,0,116,18,160,20,124,1,100,8,161,2,125,13,124,8, - 103,1,124,13,95,21,124,13,83,0,100,8,83,0,41,10, - 122,111,84,114,121,32,116,111,32,102,105,110,100,32,97,32, - 115,112,101,99,32,102,111,114,32,116,104,101,32,115,112,101, - 99,105,102,105,101,100,32,109,111,100,117,108,101,46,10,10, - 32,32,32,32,32,32,32,32,82,101,116,117,114,110,115,32, - 116,104,101,32,109,97,116,99,104,105,110,103,32,115,112,101, - 99,44,32,111,114,32,78,111,110,101,32,105,102,32,110,111, - 116,32,102,111,117,110,100,46,10,32,32,32,32,32,32,32, - 32,70,114,71,0,0,0,114,28,0,0,0,114,104,0,0, - 0,114,209,0,0,0,122,9,116,114,121,105,110,103,32,123, - 125,41,1,90,9,118,101,114,98,111,115,105,116,121,78,122, - 25,112,111,115,115,105,98,108,101,32,110,97,109,101,115,112, - 97,99,101,32,102,111,114,32,123,125,41,22,114,41,0,0, - 0,114,49,0,0,0,114,44,0,0,0,114,4,0,0,0, - 114,55,0,0,0,114,0,1,0,0,114,50,0,0,0,114, - 58,1,0,0,218,11,95,102,105,108,108,95,99,97,99,104, - 101,114,9,0,0,0,114,61,1,0,0,114,105,0,0,0, - 114,60,1,0,0,114,38,0,0,0,114,57,1,0,0,114, - 54,0,0,0,114,51,1,0,0,114,56,0,0,0,114,134, - 0,0,0,114,149,0,0,0,114,183,0,0,0,114,178,0, - 0,0,41,14,114,118,0,0,0,114,139,0,0,0,114,202, - 0,0,0,90,12,105,115,95,110,97,109,101,115,112,97,99, - 101,90,11,116,97,105,108,95,109,111,100,117,108,101,114,169, - 0,0,0,90,5,99,97,99,104,101,90,12,99,97,99,104, - 101,95,109,111,100,117,108,101,90,9,98,97,115,101,95,112, - 97,116,104,114,8,1,0,0,114,188,0,0,0,90,13,105, - 110,105,116,95,102,105,108,101,110,97,109,101,90,9,102,117, - 108,108,95,112,97,116,104,114,187,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,114,203,0,0,0, - 136,5,0,0,115,72,0,0,0,0,5,4,1,14,1,2, - 1,24,1,12,1,10,1,10,1,8,1,6,2,6,1,6, - 1,10,2,6,1,4,2,8,1,12,1,14,1,8,1,10, - 1,8,1,26,4,8,2,14,1,16,1,16,1,12,1,8, - 1,10,1,4,255,10,2,6,1,12,1,12,1,8,1,4, - 1,122,20,70,105,108,101,70,105,110,100,101,114,46,102,105, - 110,100,95,115,112,101,99,99,1,0,0,0,0,0,0,0, - 0,0,0,0,9,0,0,0,10,0,0,0,67,0,0,0, - 115,188,0,0,0,124,0,106,0,125,1,122,22,116,1,160, - 2,124,1,112,22,116,1,160,3,161,0,161,1,125,2,87, - 0,110,28,4,0,116,4,116,5,116,6,102,3,121,56,1, - 0,1,0,1,0,103,0,125,2,89,0,110,2,48,0,116, - 7,106,8,160,9,100,1,161,1,115,82,116,10,124,2,131, - 1,124,0,95,11,110,74,116,10,131,0,125,3,124,2,68, - 0,93,56,125,4,124,4,160,12,100,2,161,1,92,3,125, - 5,125,6,125,7,124,6,114,134,100,3,160,13,124,5,124, - 7,160,14,161,0,161,2,125,8,110,4,124,5,125,8,124, - 3,160,15,124,8,161,1,1,0,113,92,124,3,124,0,95, - 11,116,7,106,8,160,9,116,16,161,1,114,184,100,4,100, - 5,132,0,124,2,68,0,131,1,124,0,95,17,100,6,83, - 0,41,7,122,68,70,105,108,108,32,116,104,101,32,99,97, - 99,104,101,32,111,102,32,112,111,116,101,110,116,105,97,108, - 32,109,111,100,117,108,101,115,32,97,110,100,32,112,97,99, - 107,97,103,101,115,32,102,111,114,32,116,104,105,115,32,100, - 105,114,101,99,116,111,114,121,46,114,0,0,0,0,114,71, - 0,0,0,114,61,0,0,0,99,1,0,0,0,0,0,0, - 0,0,0,0,0,2,0,0,0,4,0,0,0,83,0,0, - 0,115,20,0,0,0,104,0,124,0,93,12,125,1,124,1, - 160,0,161,0,146,2,113,4,83,0,114,5,0,0,0,41, - 
1,114,105,0,0,0,41,2,114,32,0,0,0,90,2,102, - 110,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, - 218,9,60,115,101,116,99,111,109,112,62,213,5,0,0,114, - 56,1,0,0,122,41,70,105,108,101,70,105,110,100,101,114, - 46,95,102,105,108,108,95,99,97,99,104,101,46,60,108,111, - 99,97,108,115,62,46,60,115,101,116,99,111,109,112,62,78, - 41,18,114,44,0,0,0,114,4,0,0,0,90,7,108,105, - 115,116,100,105,114,114,55,0,0,0,114,45,1,0,0,218, - 15,80,101,114,109,105,115,115,105,111,110,69,114,114,111,114, - 218,18,78,111,116,65,68,105,114,101,99,116,111,114,121,69, - 114,114,111,114,114,1,0,0,0,114,10,0,0,0,114,11, - 0,0,0,114,59,1,0,0,114,60,1,0,0,114,100,0, - 0,0,114,62,0,0,0,114,105,0,0,0,218,3,97,100, - 100,114,12,0,0,0,114,61,1,0,0,41,9,114,118,0, - 0,0,114,44,0,0,0,90,8,99,111,110,116,101,110,116, - 115,90,21,108,111,119,101,114,95,115,117,102,102,105,120,95, - 99,111,110,116,101,110,116,115,114,33,1,0,0,114,116,0, - 0,0,114,20,1,0,0,114,8,1,0,0,90,8,110,101, - 119,95,110,97,109,101,114,5,0,0,0,114,5,0,0,0, - 114,8,0,0,0,114,63,1,0,0,184,5,0,0,115,34, - 0,0,0,0,2,6,1,2,1,22,1,18,3,10,3,12, - 1,12,7,6,1,8,1,16,1,4,1,18,2,4,1,12, - 1,6,1,12,1,122,22,70,105,108,101,70,105,110,100,101, - 114,46,95,102,105,108,108,95,99,97,99,104,101,99,1,0, - 0,0,0,0,0,0,0,0,0,0,3,0,0,0,3,0, - 0,0,7,0,0,0,115,18,0,0,0,135,0,135,1,102, - 2,100,1,100,2,132,8,125,2,124,2,83,0,41,3,97, - 20,1,0,0,65,32,99,108,97,115,115,32,109,101,116,104, - 111,100,32,119,104,105,99,104,32,114,101,116,117,114,110,115, - 32,97,32,99,108,111,115,117,114,101,32,116,111,32,117,115, - 101,32,111,110,32,115,121,115,46,112,97,116,104,95,104,111, - 111,107,10,32,32,32,32,32,32,32,32,119,104,105,99,104, - 32,119,105,108,108,32,114,101,116,117,114,110,32,97,110,32, - 105,110,115,116,97,110,99,101,32,117,115,105,110,103,32,116, - 104,101,32,115,112,101,99,105,102,105,101,100,32,108,111,97, - 100,101,114,115,32,97,110,100,32,116,104,101,32,112,97,116, - 104,10,32,32,32,32,32,32,32,32,99,97,108,108,101,100, - 32,111,110,32,116,104,101,32,99,108,111,115,117,114,101,46, - 10,10,32,32,32,32,32,32,32,32,73,102,32,116,104,101, - 32,112,97,116,104,32,99,97,108,108,101,100,32,111,110,32, - 116,104,101,32,99,108,111,115,117,114,101,32,105,115,32,110, - 111,116,32,97,32,100,105,114,101,99,116,111,114,121,44,32, - 73,109,112,111,114,116,69,114,114,111,114,32,105,115,10,32, - 32,32,32,32,32,32,32,114,97,105,115,101,100,46,10,10, - 32,32,32,32,32,32,32,32,99,1,0,0,0,0,0,0, - 0,0,0,0,0,1,0,0,0,4,0,0,0,19,0,0, - 0,115,36,0,0,0,116,0,124,0,131,1,115,20,116,1, - 100,1,124,0,100,2,141,2,130,1,136,0,124,0,103,1, - 136,1,162,1,82,0,142,0,83,0,41,3,122,45,80,97, - 116,104,32,104,111,111,107,32,102,111,114,32,105,109,112,111, - 114,116,108,105,98,46,109,97,99,104,105,110,101,114,121,46, - 70,105,108,101,70,105,110,100,101,114,46,122,30,111,110,108, - 121,32,100,105,114,101,99,116,111,114,105,101,115,32,97,114, - 101,32,115,117,112,112,111,114,116,101,100,114,48,0,0,0, - 41,2,114,56,0,0,0,114,117,0,0,0,114,48,0,0, - 0,169,2,114,193,0,0,0,114,62,1,0,0,114,5,0, - 0,0,114,8,0,0,0,218,24,112,97,116,104,95,104,111, - 111,107,95,102,111,114,95,70,105,108,101,70,105,110,100,101, - 114,225,5,0,0,115,6,0,0,0,0,2,8,1,12,1, - 122,54,70,105,108,101,70,105,110,100,101,114,46,112,97,116, - 104,95,104,111,111,107,46,60,108,111,99,97,108,115,62,46, - 112,97,116,104,95,104,111,111,107,95,102,111,114,95,70,105, - 108,101,70,105,110,100,101,114,114,5,0,0,0,41,3,114, - 193,0,0,0,114,62,1,0,0,114,69,1,0,0,114,5, - 0,0,0,114,68,1,0,0,114,8,0,0,0,218,9,112, - 97,116,104,95,104,111,111,107,215,5,0,0,115,4,0,0, 
- 0,0,10,14,6,122,20,70,105,108,101,70,105,110,100,101, - 114,46,112,97,116,104,95,104,111,111,107,99,1,0,0,0, - 0,0,0,0,0,0,0,0,1,0,0,0,3,0,0,0, - 67,0,0,0,115,12,0,0,0,100,1,160,0,124,0,106, - 1,161,1,83,0,41,2,78,122,16,70,105,108,101,70,105, - 110,100,101,114,40,123,33,114,125,41,41,2,114,62,0,0, - 0,114,44,0,0,0,114,246,0,0,0,114,5,0,0,0, - 114,5,0,0,0,114,8,0,0,0,114,31,1,0,0,233, - 5,0,0,115,2,0,0,0,0,1,122,19,70,105,108,101, - 70,105,110,100,101,114,46,95,95,114,101,112,114,95,95,41, - 1,78,41,15,114,125,0,0,0,114,124,0,0,0,114,126, - 0,0,0,114,127,0,0,0,114,209,0,0,0,114,38,1, - 0,0,114,143,0,0,0,114,206,0,0,0,114,137,0,0, - 0,114,51,1,0,0,114,203,0,0,0,114,63,1,0,0, - 114,207,0,0,0,114,70,1,0,0,114,31,1,0,0,114, + 99,104,101,46,10,10,32,32,32,32,32,32,32,32,73,102, + 32,116,104,101,32,112,97,116,104,32,101,110,116,114,121,32, + 105,115,32,110,111,116,32,105,110,32,116,104,101,32,99,97, + 99,104,101,44,32,102,105,110,100,32,116,104,101,32,97,112, + 112,114,111,112,114,105,97,116,101,32,102,105,110,100,101,114, + 10,32,32,32,32,32,32,32,32,97,110,100,32,99,97,99, + 104,101,32,105,116,46,32,73,102,32,110,111,32,102,105,110, + 100,101,114,32,105,115,32,97,118,97,105,108,97,98,108,101, + 44,32,115,116,111,114,101,32,78,111,110,101,46,10,10,32, + 32,32,32,32,32,32,32,114,40,0,0,0,78,41,7,114, + 4,0,0,0,114,55,0,0,0,218,17,70,105,108,101,78, + 111,116,70,111,117,110,100,69,114,114,111,114,114,1,0,0, + 0,114,40,1,0,0,218,8,75,101,121,69,114,114,111,114, + 114,44,1,0,0,41,3,114,193,0,0,0,114,44,0,0, + 0,114,42,1,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,20,95,112,97,116,104,95,105,109,112, + 111,114,116,101,114,95,99,97,99,104,101,226,4,0,0,115, + 22,0,0,0,0,8,8,1,2,1,12,1,12,3,8,1, + 2,1,14,1,12,1,10,1,16,1,122,31,80,97,116,104, + 70,105,110,100,101,114,46,95,112,97,116,104,95,105,109,112, + 111,114,116,101,114,95,99,97,99,104,101,99,3,0,0,0, + 0,0,0,0,0,0,0,0,6,0,0,0,4,0,0,0, + 67,0,0,0,115,82,0,0,0,116,0,124,2,100,1,131, + 2,114,26,124,2,160,1,124,1,161,1,92,2,125,3,125, + 4,110,14,124,2,160,2,124,1,161,1,125,3,103,0,125, + 4,124,3,100,0,117,1,114,60,116,3,160,4,124,1,124, + 3,161,2,83,0,116,3,160,5,124,1,100,0,161,2,125, + 5,124,4,124,5,95,6,124,5,83,0,41,2,78,114,137, + 0,0,0,41,7,114,128,0,0,0,114,137,0,0,0,114, + 206,0,0,0,114,134,0,0,0,114,201,0,0,0,114,183, + 0,0,0,114,178,0,0,0,41,6,114,193,0,0,0,114, + 139,0,0,0,114,42,1,0,0,114,140,0,0,0,114,141, + 0,0,0,114,187,0,0,0,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,218,16,95,108,101,103,97,99,121, + 95,103,101,116,95,115,112,101,99,248,4,0,0,115,18,0, + 0,0,0,4,10,1,16,2,10,1,4,1,8,1,12,1, + 12,1,6,1,122,27,80,97,116,104,70,105,110,100,101,114, + 46,95,108,101,103,97,99,121,95,103,101,116,95,115,112,101, + 99,78,99,4,0,0,0,0,0,0,0,0,0,0,0,9, + 0,0,0,5,0,0,0,67,0,0,0,115,166,0,0,0, + 103,0,125,4,124,2,68,0,93,134,125,5,116,0,124,5, + 116,1,116,2,102,2,131,2,115,28,113,8,124,0,160,3, + 124,5,161,1,125,6,124,6,100,1,117,1,114,8,116,4, + 124,6,100,2,131,2,114,70,124,6,160,5,124,1,124,3, + 161,2,125,7,110,12,124,0,160,6,124,1,124,6,161,2, + 125,7,124,7,100,1,117,0,114,92,113,8,124,7,106,7, + 100,1,117,1,114,110,124,7,2,0,1,0,83,0,124,7, + 106,8,125,8,124,8,100,1,117,0,114,132,116,9,100,3, + 131,1,130,1,124,4,160,10,124,8,161,1,1,0,113,8, + 116,11,160,12,124,1,100,1,161,2,125,7,124,4,124,7, + 95,8,124,7,83,0,41,4,122,63,70,105,110,100,32,116, + 104,101,32,108,111,97,100,101,114,32,111,114,32,110,97,109, + 101,115,112,97,99,101,95,112,97,116,104,32,102,111,114,32, + 116,104,105,115,32,109,111,100,117,108,101,47,112,97,99,107, + 
97,103,101,32,110,97,109,101,46,78,114,203,0,0,0,122, + 19,115,112,101,99,32,109,105,115,115,105,110,103,32,108,111, + 97,100,101,114,41,13,114,161,0,0,0,114,84,0,0,0, + 218,5,98,121,116,101,115,114,47,1,0,0,114,128,0,0, + 0,114,203,0,0,0,114,48,1,0,0,114,140,0,0,0, + 114,178,0,0,0,114,117,0,0,0,114,167,0,0,0,114, + 134,0,0,0,114,183,0,0,0,41,9,114,193,0,0,0, + 114,139,0,0,0,114,44,0,0,0,114,202,0,0,0,218, + 14,110,97,109,101,115,112,97,99,101,95,112,97,116,104,90, + 5,101,110,116,114,121,114,42,1,0,0,114,187,0,0,0, + 114,141,0,0,0,114,5,0,0,0,114,5,0,0,0,114, + 8,0,0,0,218,9,95,103,101,116,95,115,112,101,99,7, + 5,0,0,115,40,0,0,0,0,5,4,1,8,1,14,1, + 2,1,10,1,8,1,10,1,14,2,12,1,8,1,2,1, + 10,1,8,1,6,1,8,1,8,5,12,2,12,1,6,1, + 122,20,80,97,116,104,70,105,110,100,101,114,46,95,103,101, + 116,95,115,112,101,99,99,4,0,0,0,0,0,0,0,0, + 0,0,0,6,0,0,0,5,0,0,0,67,0,0,0,115, + 100,0,0,0,124,2,100,1,117,0,114,14,116,0,106,1, + 125,2,124,0,160,2,124,1,124,2,124,3,161,3,125,4, + 124,4,100,1,117,0,114,40,100,1,83,0,124,4,106,3, + 100,1,117,0,114,92,124,4,106,4,125,5,124,5,114,86, + 100,1,124,4,95,5,116,6,124,1,124,5,124,0,106,2, + 131,3,124,4,95,4,124,4,83,0,100,1,83,0,110,4, + 124,4,83,0,100,1,83,0,41,2,122,141,84,114,121,32, + 116,111,32,102,105,110,100,32,97,32,115,112,101,99,32,102, + 111,114,32,39,102,117,108,108,110,97,109,101,39,32,111,110, + 32,115,121,115,46,112,97,116,104,32,111,114,32,39,112,97, + 116,104,39,46,10,10,32,32,32,32,32,32,32,32,84,104, + 101,32,115,101,97,114,99,104,32,105,115,32,98,97,115,101, + 100,32,111,110,32,115,121,115,46,112,97,116,104,95,104,111, + 111,107,115,32,97,110,100,32,115,121,115,46,112,97,116,104, + 95,105,109,112,111,114,116,101,114,95,99,97,99,104,101,46, + 10,32,32,32,32,32,32,32,32,78,41,7,114,1,0,0, + 0,114,44,0,0,0,114,51,1,0,0,114,140,0,0,0, + 114,178,0,0,0,114,181,0,0,0,114,13,1,0,0,41, + 6,114,193,0,0,0,114,139,0,0,0,114,44,0,0,0, + 114,202,0,0,0,114,187,0,0,0,114,50,1,0,0,114, + 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,203, + 0,0,0,39,5,0,0,115,26,0,0,0,0,6,8,1, + 6,1,14,1,8,1,4,1,10,1,6,1,4,3,6,1, + 16,1,4,2,6,2,122,20,80,97,116,104,70,105,110,100, + 101,114,46,102,105,110,100,95,115,112,101,99,99,3,0,0, + 0,0,0,0,0,0,0,0,0,4,0,0,0,4,0,0, + 0,67,0,0,0,115,30,0,0,0,124,0,160,0,124,1, + 124,2,161,2,125,3,124,3,100,1,117,0,114,24,100,1, + 83,0,124,3,106,1,83,0,41,2,122,170,102,105,110,100, + 32,116,104,101,32,109,111,100,117,108,101,32,111,110,32,115, + 121,115,46,112,97,116,104,32,111,114,32,39,112,97,116,104, + 39,32,98,97,115,101,100,32,111,110,32,115,121,115,46,112, + 97,116,104,95,104,111,111,107,115,32,97,110,100,10,32,32, + 32,32,32,32,32,32,115,121,115,46,112,97,116,104,95,105, + 109,112,111,114,116,101,114,95,99,97,99,104,101,46,10,10, + 32,32,32,32,32,32,32,32,84,104,105,115,32,109,101,116, + 104,111,100,32,105,115,32,100,101,112,114,101,99,97,116,101, + 100,46,32,32,85,115,101,32,102,105,110,100,95,115,112,101, + 99,40,41,32,105,110,115,116,101,97,100,46,10,10,32,32, + 32,32,32,32,32,32,78,114,204,0,0,0,114,205,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,206,0,0,0,63,5,0,0,115,8,0,0,0,0,8, + 12,1,8,1,4,1,122,22,80,97,116,104,70,105,110,100, + 101,114,46,102,105,110,100,95,109,111,100,117,108,101,99,1, + 0,0,0,0,0,0,0,0,0,0,0,4,0,0,0,4, + 0,0,0,79,0,0,0,115,28,0,0,0,100,1,100,2, + 108,0,109,1,125,3,1,0,124,3,106,2,124,1,105,0, + 124,2,164,1,142,1,83,0,41,3,97,32,1,0,0,10, + 32,32,32,32,32,32,32,32,70,105,110,100,32,100,105,115, + 116,114,105,98,117,116,105,111,110,115,46,10,10,32,32,32, + 
32,32,32,32,32,82,101,116,117,114,110,32,97,110,32,105, + 116,101,114,97,98,108,101,32,111,102,32,97,108,108,32,68, + 105,115,116,114,105,98,117,116,105,111,110,32,105,110,115,116, + 97,110,99,101,115,32,99,97,112,97,98,108,101,32,111,102, + 10,32,32,32,32,32,32,32,32,108,111,97,100,105,110,103, + 32,116,104,101,32,109,101,116,97,100,97,116,97,32,102,111, + 114,32,112,97,99,107,97,103,101,115,32,109,97,116,99,104, + 105,110,103,32,96,96,99,111,110,116,101,120,116,46,110,97, + 109,101,96,96,10,32,32,32,32,32,32,32,32,40,111,114, + 32,97,108,108,32,110,97,109,101,115,32,105,102,32,96,96, + 78,111,110,101,96,96,32,105,110,100,105,99,97,116,101,100, + 41,32,97,108,111,110,103,32,116,104,101,32,112,97,116,104, + 115,32,105,110,32,116,104,101,32,108,105,115,116,10,32,32, + 32,32,32,32,32,32,111,102,32,100,105,114,101,99,116,111, + 114,105,101,115,32,96,96,99,111,110,116,101,120,116,46,112, + 97,116,104,96,96,46,10,32,32,32,32,32,32,32,32,114, + 73,0,0,0,41,1,218,18,77,101,116,97,100,97,116,97, + 80,97,116,104,70,105,110,100,101,114,41,3,90,18,105,109, + 112,111,114,116,108,105,98,46,109,101,116,97,100,97,116,97, + 114,52,1,0,0,218,18,102,105,110,100,95,100,105,115,116, + 114,105,98,117,116,105,111,110,115,41,4,114,193,0,0,0, + 114,119,0,0,0,114,120,0,0,0,114,52,1,0,0,114, + 5,0,0,0,114,5,0,0,0,114,8,0,0,0,114,53, + 1,0,0,76,5,0,0,115,4,0,0,0,0,10,12,1, + 122,29,80,97,116,104,70,105,110,100,101,114,46,102,105,110, + 100,95,100,105,115,116,114,105,98,117,116,105,111,110,115,41, + 1,78,41,2,78,78,41,1,78,41,13,114,125,0,0,0, + 114,124,0,0,0,114,126,0,0,0,114,127,0,0,0,114, + 207,0,0,0,114,38,1,0,0,114,44,1,0,0,114,47, + 1,0,0,114,48,1,0,0,114,51,1,0,0,114,203,0, + 0,0,114,206,0,0,0,114,53,1,0,0,114,5,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,37,1,0,0,199,4,0,0,115,34,0,0,0,8,2, + 4,2,2,1,10,9,2,1,10,12,2,1,10,21,2,1, + 10,14,2,1,12,31,2,1,12,23,2,1,12,12,2,1, + 114,37,1,0,0,99,0,0,0,0,0,0,0,0,0,0, + 0,0,0,0,0,0,3,0,0,0,64,0,0,0,115,90, + 0,0,0,101,0,90,1,100,0,90,2,100,1,90,3,100, + 2,100,3,132,0,90,4,100,4,100,5,132,0,90,5,101, + 6,90,7,100,6,100,7,132,0,90,8,100,8,100,9,132, + 0,90,9,100,19,100,11,100,12,132,1,90,10,100,13,100, + 14,132,0,90,11,101,12,100,15,100,16,132,0,131,1,90, + 13,100,17,100,18,132,0,90,14,100,10,83,0,41,20,218, + 10,70,105,108,101,70,105,110,100,101,114,122,172,70,105,108, + 101,45,98,97,115,101,100,32,102,105,110,100,101,114,46,10, + 10,32,32,32,32,73,110,116,101,114,97,99,116,105,111,110, + 115,32,119,105,116,104,32,116,104,101,32,102,105,108,101,32, + 115,121,115,116,101,109,32,97,114,101,32,99,97,99,104,101, + 100,32,102,111,114,32,112,101,114,102,111,114,109,97,110,99, + 101,44,32,98,101,105,110,103,10,32,32,32,32,114,101,102, + 114,101,115,104,101,100,32,119,104,101,110,32,116,104,101,32, + 100,105,114,101,99,116,111,114,121,32,116,104,101,32,102,105, + 110,100,101,114,32,105,115,32,104,97,110,100,108,105,110,103, + 32,104,97,115,32,98,101,101,110,32,109,111,100,105,102,105, + 101,100,46,10,10,32,32,32,32,99,2,0,0,0,0,0, + 0,0,0,0,0,0,5,0,0,0,6,0,0,0,7,0, + 0,0,115,84,0,0,0,103,0,125,3,124,2,68,0,93, + 32,92,2,137,0,125,4,124,3,160,0,135,0,102,1,100, + 1,100,2,132,8,124,4,68,0,131,1,161,1,1,0,113, + 8,124,3,124,0,95,1,124,1,112,54,100,3,124,0,95, + 2,100,4,124,0,95,3,116,4,131,0,124,0,95,5,116, + 4,131,0,124,0,95,6,100,5,83,0,41,6,122,154,73, + 110,105,116,105,97,108,105,122,101,32,119,105,116,104,32,116, + 104,101,32,112,97,116,104,32,116,111,32,115,101,97,114,99, + 104,32,111,110,32,97,110,100,32,97,32,118,97,114,105,97, + 
98,108,101,32,110,117,109,98,101,114,32,111,102,10,32,32, + 32,32,32,32,32,32,50,45,116,117,112,108,101,115,32,99, + 111,110,116,97,105,110,105,110,103,32,116,104,101,32,108,111, + 97,100,101,114,32,97,110,100,32,116,104,101,32,102,105,108, + 101,32,115,117,102,102,105,120,101,115,32,116,104,101,32,108, + 111,97,100,101,114,10,32,32,32,32,32,32,32,32,114,101, + 99,111,103,110,105,122,101,115,46,99,1,0,0,0,0,0, + 0,0,0,0,0,0,2,0,0,0,3,0,0,0,51,0, + 0,0,115,22,0,0,0,124,0,93,14,125,1,124,1,136, + 0,102,2,86,0,1,0,113,2,100,0,83,0,114,109,0, + 0,0,114,5,0,0,0,114,7,1,0,0,169,1,114,140, + 0,0,0,114,5,0,0,0,114,8,0,0,0,114,10,1, + 0,0,105,5,0,0,243,0,0,0,0,122,38,70,105,108, + 101,70,105,110,100,101,114,46,95,95,105,110,105,116,95,95, + 46,60,108,111,99,97,108,115,62,46,60,103,101,110,101,120, + 112,114,62,114,71,0,0,0,114,104,0,0,0,78,41,7, + 114,167,0,0,0,218,8,95,108,111,97,100,101,114,115,114, + 44,0,0,0,218,11,95,112,97,116,104,95,109,116,105,109, + 101,218,3,115,101,116,218,11,95,112,97,116,104,95,99,97, + 99,104,101,218,19,95,114,101,108,97,120,101,100,95,112,97, + 116,104,95,99,97,99,104,101,41,5,114,118,0,0,0,114, + 44,0,0,0,218,14,108,111,97,100,101,114,95,100,101,116, + 97,105,108,115,90,7,108,111,97,100,101,114,115,114,189,0, + 0,0,114,5,0,0,0,114,55,1,0,0,114,8,0,0, + 0,114,209,0,0,0,99,5,0,0,115,16,0,0,0,0, + 4,4,1,12,1,26,1,6,2,10,1,6,1,8,1,122, + 19,70,105,108,101,70,105,110,100,101,114,46,95,95,105,110, + 105,116,95,95,99,1,0,0,0,0,0,0,0,0,0,0, + 0,1,0,0,0,2,0,0,0,67,0,0,0,115,10,0, + 0,0,100,1,124,0,95,0,100,2,83,0,41,3,122,31, + 73,110,118,97,108,105,100,97,116,101,32,116,104,101,32,100, + 105,114,101,99,116,111,114,121,32,109,116,105,109,101,46,114, + 104,0,0,0,78,41,1,114,58,1,0,0,114,246,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,38,1,0,0,113,5,0,0,115,2,0,0,0,0,2, + 122,28,70,105,108,101,70,105,110,100,101,114,46,105,110,118, + 97,108,105,100,97,116,101,95,99,97,99,104,101,115,99,2, + 0,0,0,0,0,0,0,0,0,0,0,3,0,0,0,3, + 0,0,0,67,0,0,0,115,42,0,0,0,124,0,160,0, + 124,1,161,1,125,2,124,2,100,1,117,0,114,26,100,1, + 103,0,102,2,83,0,124,2,106,1,124,2,106,2,112,38, + 103,0,102,2,83,0,41,2,122,197,84,114,121,32,116,111, + 32,102,105,110,100,32,97,32,108,111,97,100,101,114,32,102, + 111,114,32,116,104,101,32,115,112,101,99,105,102,105,101,100, + 32,109,111,100,117,108,101,44,32,111,114,32,116,104,101,32, + 110,97,109,101,115,112,97,99,101,10,32,32,32,32,32,32, + 32,32,112,97,99,107,97,103,101,32,112,111,114,116,105,111, + 110,115,46,32,82,101,116,117,114,110,115,32,40,108,111,97, + 100,101,114,44,32,108,105,115,116,45,111,102,45,112,111,114, + 116,105,111,110,115,41,46,10,10,32,32,32,32,32,32,32, + 32,84,104,105,115,32,109,101,116,104,111,100,32,105,115,32, + 100,101,112,114,101,99,97,116,101,100,46,32,32,85,115,101, + 32,102,105,110,100,95,115,112,101,99,40,41,32,105,110,115, + 116,101,97,100,46,10,10,32,32,32,32,32,32,32,32,78, + 41,3,114,203,0,0,0,114,140,0,0,0,114,178,0,0, + 0,41,3,114,118,0,0,0,114,139,0,0,0,114,187,0, + 0,0,114,5,0,0,0,114,5,0,0,0,114,8,0,0, + 0,114,137,0,0,0,119,5,0,0,115,8,0,0,0,0, + 7,10,1,8,1,8,1,122,22,70,105,108,101,70,105,110, + 100,101,114,46,102,105,110,100,95,108,111,97,100,101,114,99, + 6,0,0,0,0,0,0,0,0,0,0,0,7,0,0,0, + 6,0,0,0,67,0,0,0,115,26,0,0,0,124,1,124, + 2,124,3,131,2,125,6,116,0,124,2,124,3,124,6,124, + 4,100,1,141,4,83,0,41,2,78,114,177,0,0,0,41, + 1,114,190,0,0,0,41,7,114,118,0,0,0,114,188,0, + 0,0,114,139,0,0,0,114,44,0,0,0,90,4,115,109, + 115,108,114,202,0,0,0,114,140,0,0,0,114,5,0,0, + 
0,114,5,0,0,0,114,8,0,0,0,114,51,1,0,0, + 131,5,0,0,115,8,0,0,0,0,1,10,1,8,1,2, + 255,122,20,70,105,108,101,70,105,110,100,101,114,46,95,103, + 101,116,95,115,112,101,99,78,99,3,0,0,0,0,0,0, + 0,0,0,0,0,14,0,0,0,8,0,0,0,67,0,0, + 0,115,96,1,0,0,100,1,125,3,124,1,160,0,100,2, + 161,1,100,3,25,0,125,4,122,24,116,1,124,0,106,2, + 112,34,116,3,160,4,161,0,131,1,106,5,125,5,87,0, + 110,22,4,0,116,6,121,64,1,0,1,0,1,0,100,4, + 125,5,89,0,110,2,48,0,124,5,124,0,106,7,107,3, + 114,90,124,0,160,8,161,0,1,0,124,5,124,0,95,7, + 116,9,131,0,114,112,124,0,106,10,125,6,124,4,160,11, + 161,0,125,7,110,10,124,0,106,12,125,6,124,4,125,7, + 124,7,124,6,118,0,114,216,116,13,124,0,106,2,124,4, + 131,2,125,8,124,0,106,14,68,0,93,58,92,2,125,9, + 125,10,100,5,124,9,23,0,125,11,116,13,124,8,124,11, + 131,2,125,12,116,15,124,12,131,1,114,148,124,0,160,16, + 124,10,124,1,124,12,124,8,103,1,124,2,161,5,2,0, + 1,0,83,0,113,148,116,17,124,8,131,1,125,3,124,0, + 106,14,68,0,93,82,92,2,125,9,125,10,116,13,124,0, + 106,2,124,4,124,9,23,0,131,2,125,12,116,18,106,19, + 100,6,124,12,100,3,100,7,141,3,1,0,124,7,124,9, + 23,0,124,6,118,0,114,222,116,15,124,12,131,1,114,222, + 124,0,160,16,124,10,124,1,124,12,100,8,124,2,161,5, + 2,0,1,0,83,0,113,222,124,3,144,1,114,92,116,18, + 160,19,100,9,124,8,161,2,1,0,116,18,160,20,124,1, + 100,8,161,2,125,13,124,8,103,1,124,13,95,21,124,13, + 83,0,100,8,83,0,41,10,122,111,84,114,121,32,116,111, + 32,102,105,110,100,32,97,32,115,112,101,99,32,102,111,114, + 32,116,104,101,32,115,112,101,99,105,102,105,101,100,32,109, + 111,100,117,108,101,46,10,10,32,32,32,32,32,32,32,32, + 82,101,116,117,114,110,115,32,116,104,101,32,109,97,116,99, + 104,105,110,103,32,115,112,101,99,44,32,111,114,32,78,111, + 110,101,32,105,102,32,110,111,116,32,102,111,117,110,100,46, + 10,32,32,32,32,32,32,32,32,70,114,71,0,0,0,114, + 28,0,0,0,114,104,0,0,0,114,209,0,0,0,122,9, + 116,114,121,105,110,103,32,123,125,41,1,90,9,118,101,114, + 98,111,115,105,116,121,78,122,25,112,111,115,115,105,98,108, + 101,32,110,97,109,101,115,112,97,99,101,32,102,111,114,32, + 123,125,41,22,114,41,0,0,0,114,49,0,0,0,114,44, + 0,0,0,114,4,0,0,0,114,55,0,0,0,114,0,1, + 0,0,114,50,0,0,0,114,58,1,0,0,218,11,95,102, + 105,108,108,95,99,97,99,104,101,114,9,0,0,0,114,61, + 1,0,0,114,105,0,0,0,114,60,1,0,0,114,38,0, + 0,0,114,57,1,0,0,114,54,0,0,0,114,51,1,0, + 0,114,56,0,0,0,114,134,0,0,0,114,149,0,0,0, + 114,183,0,0,0,114,178,0,0,0,41,14,114,118,0,0, + 0,114,139,0,0,0,114,202,0,0,0,90,12,105,115,95, + 110,97,109,101,115,112,97,99,101,90,11,116,97,105,108,95, + 109,111,100,117,108,101,114,169,0,0,0,90,5,99,97,99, + 104,101,90,12,99,97,99,104,101,95,109,111,100,117,108,101, + 90,9,98,97,115,101,95,112,97,116,104,114,8,1,0,0, + 114,188,0,0,0,90,13,105,110,105,116,95,102,105,108,101, + 110,97,109,101,90,9,102,117,108,108,95,112,97,116,104,114, + 187,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, + 0,0,0,114,203,0,0,0,136,5,0,0,115,72,0,0, + 0,0,5,4,1,14,1,2,1,24,1,12,1,10,1,10, + 1,8,1,6,2,6,1,6,1,10,2,6,1,4,2,8, + 1,12,1,14,1,8,1,10,1,8,1,26,4,8,2,14, + 1,16,1,16,1,12,1,8,1,10,1,4,255,10,2,6, + 1,12,1,12,1,8,1,4,1,122,20,70,105,108,101,70, + 105,110,100,101,114,46,102,105,110,100,95,115,112,101,99,99, + 1,0,0,0,0,0,0,0,0,0,0,0,9,0,0,0, + 10,0,0,0,67,0,0,0,115,188,0,0,0,124,0,106, + 0,125,1,122,22,116,1,160,2,124,1,112,22,116,1,160, + 3,161,0,161,1,125,2,87,0,110,28,4,0,116,4,116, + 5,116,6,102,3,121,56,1,0,1,0,1,0,103,0,125, + 2,89,0,110,2,48,0,116,7,106,8,160,9,100,1,161, + 1,115,82,116,10,124,2,131,1,124,0,95,11,110,74,116, + 
10,131,0,125,3,124,2,68,0,93,56,125,4,124,4,160, + 12,100,2,161,1,92,3,125,5,125,6,125,7,124,6,114, + 134,100,3,160,13,124,5,124,7,160,14,161,0,161,2,125, + 8,110,4,124,5,125,8,124,3,160,15,124,8,161,1,1, + 0,113,92,124,3,124,0,95,11,116,7,106,8,160,9,116, + 16,161,1,114,184,100,4,100,5,132,0,124,2,68,0,131, + 1,124,0,95,17,100,6,83,0,41,7,122,68,70,105,108, + 108,32,116,104,101,32,99,97,99,104,101,32,111,102,32,112, + 111,116,101,110,116,105,97,108,32,109,111,100,117,108,101,115, + 32,97,110,100,32,112,97,99,107,97,103,101,115,32,102,111, + 114,32,116,104,105,115,32,100,105,114,101,99,116,111,114,121, + 46,114,0,0,0,0,114,71,0,0,0,114,61,0,0,0, + 99,1,0,0,0,0,0,0,0,0,0,0,0,2,0,0, + 0,4,0,0,0,83,0,0,0,115,20,0,0,0,104,0, + 124,0,93,12,125,1,124,1,160,0,161,0,146,2,113,4, + 83,0,114,5,0,0,0,41,1,114,105,0,0,0,41,2, + 114,32,0,0,0,90,2,102,110,114,5,0,0,0,114,5, + 0,0,0,114,8,0,0,0,218,9,60,115,101,116,99,111, + 109,112,62,213,5,0,0,114,56,1,0,0,122,41,70,105, + 108,101,70,105,110,100,101,114,46,95,102,105,108,108,95,99, + 97,99,104,101,46,60,108,111,99,97,108,115,62,46,60,115, + 101,116,99,111,109,112,62,78,41,18,114,44,0,0,0,114, + 4,0,0,0,90,7,108,105,115,116,100,105,114,114,55,0, + 0,0,114,45,1,0,0,218,15,80,101,114,109,105,115,115, + 105,111,110,69,114,114,111,114,218,18,78,111,116,65,68,105, + 114,101,99,116,111,114,121,69,114,114,111,114,114,1,0,0, + 0,114,10,0,0,0,114,11,0,0,0,114,59,1,0,0, + 114,60,1,0,0,114,100,0,0,0,114,62,0,0,0,114, + 105,0,0,0,218,3,97,100,100,114,12,0,0,0,114,61, + 1,0,0,41,9,114,118,0,0,0,114,44,0,0,0,90, + 8,99,111,110,116,101,110,116,115,90,21,108,111,119,101,114, + 95,115,117,102,102,105,120,95,99,111,110,116,101,110,116,115, + 114,33,1,0,0,114,116,0,0,0,114,20,1,0,0,114, + 8,1,0,0,90,8,110,101,119,95,110,97,109,101,114,5, + 0,0,0,114,5,0,0,0,114,8,0,0,0,114,63,1, + 0,0,184,5,0,0,115,34,0,0,0,0,2,6,1,2, + 1,22,1,18,3,10,3,12,1,12,7,6,1,8,1,16, + 1,4,1,18,2,4,1,12,1,6,1,12,1,122,22,70, + 105,108,101,70,105,110,100,101,114,46,95,102,105,108,108,95, + 99,97,99,104,101,99,1,0,0,0,0,0,0,0,0,0, + 0,0,3,0,0,0,3,0,0,0,7,0,0,0,115,18, + 0,0,0,135,0,135,1,102,2,100,1,100,2,132,8,125, + 2,124,2,83,0,41,3,97,20,1,0,0,65,32,99,108, + 97,115,115,32,109,101,116,104,111,100,32,119,104,105,99,104, + 32,114,101,116,117,114,110,115,32,97,32,99,108,111,115,117, + 114,101,32,116,111,32,117,115,101,32,111,110,32,115,121,115, + 46,112,97,116,104,95,104,111,111,107,10,32,32,32,32,32, + 32,32,32,119,104,105,99,104,32,119,105,108,108,32,114,101, + 116,117,114,110,32,97,110,32,105,110,115,116,97,110,99,101, + 32,117,115,105,110,103,32,116,104,101,32,115,112,101,99,105, + 102,105,101,100,32,108,111,97,100,101,114,115,32,97,110,100, + 32,116,104,101,32,112,97,116,104,10,32,32,32,32,32,32, + 32,32,99,97,108,108,101,100,32,111,110,32,116,104,101,32, + 99,108,111,115,117,114,101,46,10,10,32,32,32,32,32,32, + 32,32,73,102,32,116,104,101,32,112,97,116,104,32,99,97, + 108,108,101,100,32,111,110,32,116,104,101,32,99,108,111,115, + 117,114,101,32,105,115,32,110,111,116,32,97,32,100,105,114, + 101,99,116,111,114,121,44,32,73,109,112,111,114,116,69,114, + 114,111,114,32,105,115,10,32,32,32,32,32,32,32,32,114, + 97,105,115,101,100,46,10,10,32,32,32,32,32,32,32,32, + 99,1,0,0,0,0,0,0,0,0,0,0,0,1,0,0, + 0,4,0,0,0,19,0,0,0,115,36,0,0,0,116,0, + 124,0,131,1,115,20,116,1,100,1,124,0,100,2,141,2, + 130,1,136,0,124,0,103,1,136,1,162,1,82,0,142,0, + 83,0,41,3,122,45,80,97,116,104,32,104,111,111,107,32, + 102,111,114,32,105,109,112,111,114,116,108,105,98,46,109,97, + 
99,104,105,110,101,114,121,46,70,105,108,101,70,105,110,100, + 101,114,46,122,30,111,110,108,121,32,100,105,114,101,99,116, + 111,114,105,101,115,32,97,114,101,32,115,117,112,112,111,114, + 116,101,100,114,48,0,0,0,41,2,114,56,0,0,0,114, + 117,0,0,0,114,48,0,0,0,169,2,114,193,0,0,0, + 114,62,1,0,0,114,5,0,0,0,114,8,0,0,0,218, + 24,112,97,116,104,95,104,111,111,107,95,102,111,114,95,70, + 105,108,101,70,105,110,100,101,114,225,5,0,0,115,6,0, + 0,0,0,2,8,1,12,1,122,54,70,105,108,101,70,105, + 110,100,101,114,46,112,97,116,104,95,104,111,111,107,46,60, + 108,111,99,97,108,115,62,46,112,97,116,104,95,104,111,111, + 107,95,102,111,114,95,70,105,108,101,70,105,110,100,101,114, + 114,5,0,0,0,41,3,114,193,0,0,0,114,62,1,0, + 0,114,69,1,0,0,114,5,0,0,0,114,68,1,0,0, + 114,8,0,0,0,218,9,112,97,116,104,95,104,111,111,107, + 215,5,0,0,115,4,0,0,0,0,10,14,6,122,20,70, + 105,108,101,70,105,110,100,101,114,46,112,97,116,104,95,104, + 111,111,107,99,1,0,0,0,0,0,0,0,0,0,0,0, + 1,0,0,0,3,0,0,0,67,0,0,0,115,12,0,0, + 0,100,1,160,0,124,0,106,1,161,1,83,0,41,2,78, + 122,16,70,105,108,101,70,105,110,100,101,114,40,123,33,114, + 125,41,41,2,114,62,0,0,0,114,44,0,0,0,114,246, + 0,0,0,114,5,0,0,0,114,5,0,0,0,114,8,0, + 0,0,114,31,1,0,0,233,5,0,0,115,2,0,0,0, + 0,1,122,19,70,105,108,101,70,105,110,100,101,114,46,95, + 95,114,101,112,114,95,95,41,1,78,41,15,114,125,0,0, + 0,114,124,0,0,0,114,126,0,0,0,114,127,0,0,0, + 114,209,0,0,0,114,38,1,0,0,114,143,0,0,0,114, + 206,0,0,0,114,137,0,0,0,114,51,1,0,0,114,203, + 0,0,0,114,63,1,0,0,114,207,0,0,0,114,70,1, + 0,0,114,31,1,0,0,114,5,0,0,0,114,5,0,0, + 0,114,5,0,0,0,114,8,0,0,0,114,54,1,0,0, + 90,5,0,0,115,22,0,0,0,8,2,4,7,8,14,8, + 4,4,2,8,12,8,5,10,48,8,31,2,1,10,17,114, + 54,1,0,0,99,4,0,0,0,0,0,0,0,0,0,0, + 0,6,0,0,0,8,0,0,0,67,0,0,0,115,144,0, + 0,0,124,0,160,0,100,1,161,1,125,4,124,0,160,0, + 100,2,161,1,125,5,124,4,115,66,124,5,114,36,124,5, + 106,1,125,4,110,30,124,2,124,3,107,2,114,56,116,2, + 124,1,124,2,131,2,125,4,110,10,116,3,124,1,124,2, + 131,2,125,4,124,5,115,84,116,4,124,1,124,2,124,4, + 100,3,141,3,125,5,122,36,124,5,124,0,100,2,60,0, + 124,4,124,0,100,1,60,0,124,2,124,0,100,4,60,0, + 124,3,124,0,100,5,60,0,87,0,110,18,4,0,116,5, + 121,138,1,0,1,0,1,0,89,0,110,2,48,0,100,0, + 83,0,41,6,78,218,10,95,95,108,111,97,100,101,114,95, + 95,218,8,95,95,115,112,101,99,95,95,114,55,1,0,0, + 90,8,95,95,102,105,108,101,95,95,90,10,95,95,99,97, + 99,104,101,100,95,95,41,6,218,3,103,101,116,114,140,0, + 0,0,114,5,1,0,0,114,255,0,0,0,114,190,0,0, + 0,218,9,69,120,99,101,112,116,105,111,110,41,6,90,2, + 110,115,114,116,0,0,0,90,8,112,97,116,104,110,97,109, + 101,90,9,99,112,97,116,104,110,97,109,101,114,140,0,0, + 0,114,187,0,0,0,114,5,0,0,0,114,5,0,0,0, + 114,8,0,0,0,218,14,95,102,105,120,95,117,112,95,109, + 111,100,117,108,101,239,5,0,0,115,34,0,0,0,0,2, + 10,1,10,1,4,1,4,1,8,1,8,1,12,2,10,1, + 4,1,14,1,2,1,8,1,8,1,8,1,12,1,12,2, + 114,75,1,0,0,99,0,0,0,0,0,0,0,0,0,0, + 0,0,3,0,0,0,3,0,0,0,67,0,0,0,115,38, + 0,0,0,116,0,116,1,160,2,161,0,102,2,125,0,116, + 3,116,4,102,2,125,1,116,5,116,6,102,2,125,2,124, + 0,124,1,124,2,103,3,83,0,41,1,122,95,82,101,116, + 117,114,110,115,32,97,32,108,105,115,116,32,111,102,32,102, + 105,108,101,45,98,97,115,101,100,32,109,111,100,117,108,101, + 32,108,111,97,100,101,114,115,46,10,10,32,32,32,32,69, + 97,99,104,32,105,116,101,109,32,105,115,32,97,32,116,117, + 112,108,101,32,40,108,111,97,100,101,114,44,32,115,117,102, + 102,105,120,101,115,41,46,10,32,32,32,32,41,7,114,252, + 0,0,0,114,163,0,0,0,218,18,101,120,116,101,110,115, + 
105,111,110,95,115,117,102,102,105,120,101,115,114,255,0,0, + 0,114,101,0,0,0,114,5,1,0,0,114,88,0,0,0, + 41,3,90,10,101,120,116,101,110,115,105,111,110,115,90,6, + 115,111,117,114,99,101,90,8,98,121,116,101,99,111,100,101, + 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,114, + 184,0,0,0,6,6,0,0,115,8,0,0,0,0,5,12, + 1,8,1,8,1,114,184,0,0,0,99,1,0,0,0,0, + 0,0,0,0,0,0,0,10,0,0,0,9,0,0,0,67, + 0,0,0,115,132,1,0,0,124,0,97,0,116,0,106,1, + 97,1,116,0,106,2,97,2,116,1,106,3,116,4,25,0, + 125,1,100,1,100,2,103,1,102,2,100,3,100,4,100,2, + 103,2,102,2,102,2,125,2,124,2,68,0,93,108,92,2, + 125,3,125,4,116,5,100,5,100,6,132,0,124,4,68,0, + 131,1,131,1,115,82,74,0,130,1,124,4,100,7,25,0, + 125,5,124,3,116,1,106,3,118,0,114,116,116,1,106,3, + 124,3,25,0,125,6,1,0,113,170,113,52,122,20,116,0, + 160,6,124,3,161,1,125,6,87,0,1,0,113,170,87,0, + 113,52,4,0,116,7,121,158,1,0,1,0,1,0,89,0, + 113,52,89,0,113,52,48,0,113,52,116,7,100,8,131,1, + 130,1,116,8,124,1,100,9,124,6,131,3,1,0,116,8, + 124,1,100,10,124,5,131,3,1,0,116,8,124,1,100,11, + 100,12,160,9,124,4,161,1,131,3,1,0,116,8,124,1, + 100,13,100,14,100,15,132,0,124,4,68,0,131,1,131,3, + 1,0,103,0,100,16,162,1,125,7,124,3,100,3,107,2, + 144,1,114,6,124,7,160,10,100,17,161,1,1,0,124,7, + 68,0,93,52,125,8,124,8,116,1,106,3,118,1,144,1, + 114,38,116,0,160,6,124,8,161,1,125,9,110,10,116,1, + 106,3,124,8,25,0,125,9,116,8,124,1,124,8,124,9, + 131,3,1,0,144,1,113,10,116,8,124,1,100,18,116,11, + 131,0,131,3,1,0,116,12,160,13,116,2,160,14,161,0, + 161,1,1,0,124,3,100,3,107,2,144,1,114,128,116,15, + 160,10,100,19,161,1,1,0,100,20,116,12,118,0,144,1, + 114,128,100,21,116,16,95,17,100,22,83,0,41,23,122,205, + 83,101,116,117,112,32,116,104,101,32,112,97,116,104,45,98, + 97,115,101,100,32,105,109,112,111,114,116,101,114,115,32,102, + 111,114,32,105,109,112,111,114,116,108,105,98,32,98,121,32, + 105,109,112,111,114,116,105,110,103,32,110,101,101,100,101,100, + 10,32,32,32,32,98,117,105,108,116,45,105,110,32,109,111, + 100,117,108,101,115,32,97,110,100,32,105,110,106,101,99,116, + 105,110,103,32,116,104,101,109,32,105,110,116,111,32,116,104, + 101,32,103,108,111,98,97,108,32,110,97,109,101,115,112,97, + 99,101,46,10,10,32,32,32,32,79,116,104,101,114,32,99, + 111,109,112,111,110,101,110,116,115,32,97,114,101,32,101,120, + 116,114,97,99,116,101,100,32,102,114,111,109,32,116,104,101, + 32,99,111,114,101,32,98,111,111,116,115,116,114,97,112,32, + 109,111,100,117,108,101,46,10,10,32,32,32,32,90,5,112, + 111,115,105,120,250,1,47,90,2,110,116,250,1,92,99,1, + 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,3, + 0,0,0,115,0,0,0,115,26,0,0,0,124,0,93,18, + 125,1,116,0,124,1,131,1,100,0,107,2,86,0,1,0, + 113,2,100,1,83,0,41,2,114,39,0,0,0,78,41,1, + 114,23,0,0,0,41,2,114,32,0,0,0,114,94,0,0, + 0,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,10,1,0,0,35,6,0,0,114,56,1,0,0,122,25, + 95,115,101,116,117,112,46,60,108,111,99,97,108,115,62,46, + 60,103,101,110,101,120,112,114,62,114,73,0,0,0,122,30, + 105,109,112,111,114,116,108,105,98,32,114,101,113,117,105,114, + 101,115,32,112,111,115,105,120,32,111,114,32,110,116,114,4, + 0,0,0,114,35,0,0,0,114,31,0,0,0,114,40,0, + 0,0,114,58,0,0,0,99,1,0,0,0,0,0,0,0, + 0,0,0,0,2,0,0,0,4,0,0,0,83,0,0,0, + 115,22,0,0,0,104,0,124,0,93,14,125,1,100,0,124, + 1,155,0,157,2,146,2,113,4,83,0,41,1,114,74,0, + 0,0,114,5,0,0,0,41,2,114,32,0,0,0,218,1, + 115,114,5,0,0,0,114,5,0,0,0,114,8,0,0,0, + 114,64,1,0,0,52,6,0,0,114,56,1,0,0,122,25, + 95,115,101,116,117,112,46,60,108,111,99,97,108,115,62,46, + 60,115,101,116,99,111,109,112,62,41,3,114,64,0,0,0, + 
114,75,0,0,0,114,160,0,0,0,114,192,0,0,0,114, + 9,0,0,0,122,4,46,112,121,119,122,6,95,100,46,112, + 121,100,84,78,41,18,114,134,0,0,0,114,1,0,0,0, + 114,163,0,0,0,114,22,1,0,0,114,125,0,0,0,218, + 3,97,108,108,90,18,95,98,117,105,108,116,105,110,95,102, + 114,111,109,95,110,97,109,101,114,117,0,0,0,114,129,0, + 0,0,114,36,0,0,0,114,186,0,0,0,114,14,0,0, + 0,114,12,1,0,0,114,167,0,0,0,114,76,1,0,0, + 114,101,0,0,0,114,191,0,0,0,114,195,0,0,0,41, + 10,218,17,95,98,111,111,116,115,116,114,97,112,95,109,111, + 100,117,108,101,90,11,115,101,108,102,95,109,111,100,117,108, + 101,90,10,111,115,95,100,101,116,97,105,108,115,90,10,98, + 117,105,108,116,105,110,95,111,115,114,31,0,0,0,114,35, + 0,0,0,90,9,111,115,95,109,111,100,117,108,101,90,13, + 98,117,105,108,116,105,110,95,110,97,109,101,115,90,12,98, + 117,105,108,116,105,110,95,110,97,109,101,90,14,98,117,105, + 108,116,105,110,95,109,111,100,117,108,101,114,5,0,0,0, + 114,5,0,0,0,114,8,0,0,0,218,6,95,115,101,116, + 117,112,17,6,0,0,115,70,0,0,0,0,8,4,1,6, + 1,6,2,10,3,22,1,12,2,22,1,8,1,10,1,10, + 1,6,2,2,1,10,1,10,1,12,1,12,2,8,2,12, + 1,12,1,18,1,22,3,8,1,10,1,10,1,8,1,12, + 1,12,2,10,1,16,3,14,1,14,1,10,1,10,1,10, + 1,114,82,1,0,0,99,1,0,0,0,0,0,0,0,0, + 0,0,0,2,0,0,0,4,0,0,0,67,0,0,0,115, + 50,0,0,0,116,0,124,0,131,1,1,0,116,1,131,0, + 125,1,116,2,106,3,160,4,116,5,106,6,124,1,142,0, + 103,1,161,1,1,0,116,2,106,7,160,8,116,9,161,1, + 1,0,100,1,83,0,41,2,122,41,73,110,115,116,97,108, + 108,32,116,104,101,32,112,97,116,104,45,98,97,115,101,100, + 32,105,109,112,111,114,116,32,99,111,109,112,111,110,101,110, + 116,115,46,78,41,10,114,82,1,0,0,114,184,0,0,0, + 114,1,0,0,0,114,43,1,0,0,114,167,0,0,0,114, + 54,1,0,0,114,70,1,0,0,218,9,109,101,116,97,95, + 112,97,116,104,114,186,0,0,0,114,37,1,0,0,41,2, + 114,81,1,0,0,90,17,115,117,112,112,111,114,116,101,100, + 95,108,111,97,100,101,114,115,114,5,0,0,0,114,5,0, + 0,0,114,8,0,0,0,218,8,95,105,110,115,116,97,108, + 108,74,6,0,0,115,8,0,0,0,0,2,8,1,6,1, + 20,1,114,84,1,0,0,41,1,114,60,0,0,0,41,1, + 78,41,3,78,78,78,41,2,114,73,0,0,0,114,73,0, + 0,0,41,1,84,41,1,78,41,1,78,41,63,114,127,0, + 0,0,114,13,0,0,0,90,37,95,67,65,83,69,95,73, + 78,83,69,78,83,73,84,73,86,69,95,80,76,65,84,70, + 79,82,77,83,95,66,89,84,69,83,95,75,69,89,114,12, + 0,0,0,114,14,0,0,0,114,21,0,0,0,114,27,0, + 0,0,114,29,0,0,0,114,38,0,0,0,114,47,0,0, + 0,114,49,0,0,0,114,53,0,0,0,114,54,0,0,0, + 114,56,0,0,0,114,59,0,0,0,114,69,0,0,0,218, + 4,116,121,112,101,218,8,95,95,99,111,100,101,95,95,114, + 162,0,0,0,114,19,0,0,0,114,148,0,0,0,114,18, + 0,0,0,114,24,0,0,0,114,236,0,0,0,114,91,0, + 0,0,114,87,0,0,0,114,101,0,0,0,114,88,0,0, + 0,90,23,68,69,66,85,71,95,66,89,84,69,67,79,68, + 69,95,83,85,70,70,73,88,69,83,90,27,79,80,84,73, + 77,73,90,69,68,95,66,89,84,69,67,79,68,69,95,83, + 85,70,70,73,88,69,83,114,97,0,0,0,114,102,0,0, + 0,114,108,0,0,0,114,112,0,0,0,114,114,0,0,0, + 114,136,0,0,0,114,143,0,0,0,114,152,0,0,0,114, + 156,0,0,0,114,158,0,0,0,114,165,0,0,0,114,170, + 0,0,0,114,171,0,0,0,114,176,0,0,0,218,6,111, + 98,106,101,99,116,114,185,0,0,0,114,190,0,0,0,114, + 191,0,0,0,114,208,0,0,0,114,221,0,0,0,114,239, + 0,0,0,114,255,0,0,0,114,5,1,0,0,114,12,1, + 0,0,114,252,0,0,0,114,13,1,0,0,114,35,1,0, + 0,114,37,1,0,0,114,54,1,0,0,114,75,1,0,0, + 114,184,0,0,0,114,82,1,0,0,114,84,1,0,0,114, 5,0,0,0,114,5,0,0,0,114,5,0,0,0,114,8, - 0,0,0,114,54,1,0,0,90,5,0,0,115,22,0,0, - 0,8,2,4,7,8,14,8,4,4,2,8,12,8,5,10, - 48,8,31,2,1,10,17,114,54,1,0,0,99,4,0,0, - 0,0,0,0,0,0,0,0,0,6,0,0,0,8,0,0, - 
0,67,0,0,0,115,144,0,0,0,124,0,160,0,100,1, - 161,1,125,4,124,0,160,0,100,2,161,1,125,5,124,4, - 115,66,124,5,114,36,124,5,106,1,125,4,110,30,124,2, - 124,3,107,2,114,56,116,2,124,1,124,2,131,2,125,4, - 110,10,116,3,124,1,124,2,131,2,125,4,124,5,115,84, - 116,4,124,1,124,2,124,4,100,3,141,3,125,5,122,36, - 124,5,124,0,100,2,60,0,124,4,124,0,100,1,60,0, - 124,2,124,0,100,4,60,0,124,3,124,0,100,5,60,0, - 87,0,110,18,4,0,116,5,121,138,1,0,1,0,1,0, - 89,0,110,2,48,0,100,0,83,0,41,6,78,218,10,95, - 95,108,111,97,100,101,114,95,95,218,8,95,95,115,112,101, - 99,95,95,114,55,1,0,0,90,8,95,95,102,105,108,101, - 95,95,90,10,95,95,99,97,99,104,101,100,95,95,41,6, - 218,3,103,101,116,114,140,0,0,0,114,5,1,0,0,114, - 255,0,0,0,114,190,0,0,0,218,9,69,120,99,101,112, - 116,105,111,110,41,6,90,2,110,115,114,116,0,0,0,90, - 8,112,97,116,104,110,97,109,101,90,9,99,112,97,116,104, - 110,97,109,101,114,140,0,0,0,114,187,0,0,0,114,5, - 0,0,0,114,5,0,0,0,114,8,0,0,0,218,14,95, - 102,105,120,95,117,112,95,109,111,100,117,108,101,239,5,0, - 0,115,34,0,0,0,0,2,10,1,10,1,4,1,4,1, - 8,1,8,1,12,2,10,1,4,1,14,1,2,1,8,1, - 8,1,8,1,12,1,12,2,114,75,1,0,0,99,0,0, - 0,0,0,0,0,0,0,0,0,0,3,0,0,0,3,0, - 0,0,67,0,0,0,115,38,0,0,0,116,0,116,1,160, - 2,161,0,102,2,125,0,116,3,116,4,102,2,125,1,116, - 5,116,6,102,2,125,2,124,0,124,1,124,2,103,3,83, - 0,41,1,122,95,82,101,116,117,114,110,115,32,97,32,108, - 105,115,116,32,111,102,32,102,105,108,101,45,98,97,115,101, - 100,32,109,111,100,117,108,101,32,108,111,97,100,101,114,115, - 46,10,10,32,32,32,32,69,97,99,104,32,105,116,101,109, - 32,105,115,32,97,32,116,117,112,108,101,32,40,108,111,97, - 100,101,114,44,32,115,117,102,102,105,120,101,115,41,46,10, - 32,32,32,32,41,7,114,252,0,0,0,114,163,0,0,0, - 218,18,101,120,116,101,110,115,105,111,110,95,115,117,102,102, - 105,120,101,115,114,255,0,0,0,114,101,0,0,0,114,5, - 1,0,0,114,88,0,0,0,41,3,90,10,101,120,116,101, - 110,115,105,111,110,115,90,6,115,111,117,114,99,101,90,8, - 98,121,116,101,99,111,100,101,114,5,0,0,0,114,5,0, - 0,0,114,8,0,0,0,114,184,0,0,0,6,6,0,0, - 115,8,0,0,0,0,5,12,1,8,1,8,1,114,184,0, - 0,0,99,1,0,0,0,0,0,0,0,0,0,0,0,10, - 0,0,0,9,0,0,0,67,0,0,0,115,132,1,0,0, - 124,0,97,0,116,0,106,1,97,1,116,0,106,2,97,2, - 116,1,106,3,116,4,25,0,125,1,100,1,100,2,103,1, - 102,2,100,3,100,4,100,2,103,2,102,2,102,2,125,2, - 124,2,68,0,93,108,92,2,125,3,125,4,116,5,100,5, - 100,6,132,0,124,4,68,0,131,1,131,1,115,82,74,0, - 130,1,124,4,100,7,25,0,125,5,124,3,116,1,106,3, - 118,0,114,116,116,1,106,3,124,3,25,0,125,6,1,0, - 113,170,113,52,122,20,116,0,160,6,124,3,161,1,125,6, - 87,0,1,0,113,170,87,0,113,52,4,0,116,7,121,158, - 1,0,1,0,1,0,89,0,113,52,89,0,113,52,48,0, - 113,52,116,7,100,8,131,1,130,1,116,8,124,1,100,9, - 124,6,131,3,1,0,116,8,124,1,100,10,124,5,131,3, - 1,0,116,8,124,1,100,11,100,12,160,9,124,4,161,1, - 131,3,1,0,116,8,124,1,100,13,100,14,100,15,132,0, - 124,4,68,0,131,1,131,3,1,0,103,0,100,16,162,1, - 125,7,124,3,100,3,107,2,144,1,114,6,124,7,160,10, - 100,17,161,1,1,0,124,7,68,0,93,52,125,8,124,8, - 116,1,106,3,118,1,144,1,114,38,116,0,160,6,124,8, - 161,1,125,9,110,10,116,1,106,3,124,8,25,0,125,9, - 116,8,124,1,124,8,124,9,131,3,1,0,144,1,113,10, - 116,8,124,1,100,18,116,11,131,0,131,3,1,0,116,12, - 160,13,116,2,160,14,161,0,161,1,1,0,124,3,100,3, - 107,2,144,1,114,128,116,15,160,10,100,19,161,1,1,0, - 100,20,116,12,118,0,144,1,114,128,100,21,116,16,95,17, - 100,22,83,0,41,23,122,205,83,101,116,117,112,32,116,104, - 101,32,112,97,116,104,45,98,97,115,101,100,32,105,109,112, - 
111,114,116,101,114,115,32,102,111,114,32,105,109,112,111,114, - 116,108,105,98,32,98,121,32,105,109,112,111,114,116,105,110, - 103,32,110,101,101,100,101,100,10,32,32,32,32,98,117,105, - 108,116,45,105,110,32,109,111,100,117,108,101,115,32,97,110, - 100,32,105,110,106,101,99,116,105,110,103,32,116,104,101,109, - 32,105,110,116,111,32,116,104,101,32,103,108,111,98,97,108, - 32,110,97,109,101,115,112,97,99,101,46,10,10,32,32,32, - 32,79,116,104,101,114,32,99,111,109,112,111,110,101,110,116, - 115,32,97,114,101,32,101,120,116,114,97,99,116,101,100,32, - 102,114,111,109,32,116,104,101,32,99,111,114,101,32,98,111, - 111,116,115,116,114,97,112,32,109,111,100,117,108,101,46,10, - 10,32,32,32,32,90,5,112,111,115,105,120,250,1,47,90, - 2,110,116,250,1,92,99,1,0,0,0,0,0,0,0,0, - 0,0,0,2,0,0,0,3,0,0,0,115,0,0,0,115, - 26,0,0,0,124,0,93,18,125,1,116,0,124,1,131,1, - 100,0,107,2,86,0,1,0,113,2,100,1,83,0,41,2, - 114,39,0,0,0,78,41,1,114,23,0,0,0,41,2,114, - 32,0,0,0,114,94,0,0,0,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,10,1,0,0,35,6,0, - 0,114,56,1,0,0,122,25,95,115,101,116,117,112,46,60, - 108,111,99,97,108,115,62,46,60,103,101,110,101,120,112,114, - 62,114,73,0,0,0,122,30,105,109,112,111,114,116,108,105, - 98,32,114,101,113,117,105,114,101,115,32,112,111,115,105,120, - 32,111,114,32,110,116,114,4,0,0,0,114,35,0,0,0, - 114,31,0,0,0,114,40,0,0,0,114,58,0,0,0,99, - 1,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0, - 4,0,0,0,83,0,0,0,115,22,0,0,0,104,0,124, - 0,93,14,125,1,100,0,124,1,155,0,157,2,146,2,113, - 4,83,0,41,1,114,74,0,0,0,114,5,0,0,0,41, - 2,114,32,0,0,0,218,1,115,114,5,0,0,0,114,5, - 0,0,0,114,8,0,0,0,114,64,1,0,0,52,6,0, - 0,114,56,1,0,0,122,25,95,115,101,116,117,112,46,60, - 108,111,99,97,108,115,62,46,60,115,101,116,99,111,109,112, - 62,41,3,114,64,0,0,0,114,75,0,0,0,114,160,0, - 0,0,114,192,0,0,0,114,9,0,0,0,122,4,46,112, - 121,119,122,6,95,100,46,112,121,100,84,78,41,18,114,134, - 0,0,0,114,1,0,0,0,114,163,0,0,0,114,22,1, - 0,0,114,125,0,0,0,218,3,97,108,108,90,18,95,98, - 117,105,108,116,105,110,95,102,114,111,109,95,110,97,109,101, - 114,117,0,0,0,114,129,0,0,0,114,36,0,0,0,114, - 186,0,0,0,114,14,0,0,0,114,12,1,0,0,114,167, - 0,0,0,114,76,1,0,0,114,101,0,0,0,114,191,0, - 0,0,114,195,0,0,0,41,10,218,17,95,98,111,111,116, - 115,116,114,97,112,95,109,111,100,117,108,101,90,11,115,101, - 108,102,95,109,111,100,117,108,101,90,10,111,115,95,100,101, - 116,97,105,108,115,90,10,98,117,105,108,116,105,110,95,111, - 115,114,31,0,0,0,114,35,0,0,0,90,9,111,115,95, - 109,111,100,117,108,101,90,13,98,117,105,108,116,105,110,95, - 110,97,109,101,115,90,12,98,117,105,108,116,105,110,95,110, - 97,109,101,90,14,98,117,105,108,116,105,110,95,109,111,100, - 117,108,101,114,5,0,0,0,114,5,0,0,0,114,8,0, - 0,0,218,6,95,115,101,116,117,112,17,6,0,0,115,70, - 0,0,0,0,8,4,1,6,1,6,2,10,3,22,1,12, - 2,22,1,8,1,10,1,10,1,6,2,2,1,10,1,10, - 1,12,1,12,2,8,2,12,1,12,1,18,1,22,3,8, - 1,10,1,10,1,8,1,12,1,12,2,10,1,16,3,14, - 1,14,1,10,1,10,1,10,1,114,82,1,0,0,99,1, - 0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,4, - 0,0,0,67,0,0,0,115,50,0,0,0,116,0,124,0, - 131,1,1,0,116,1,131,0,125,1,116,2,106,3,160,4, - 116,5,106,6,124,1,142,0,103,1,161,1,1,0,116,2, - 106,7,160,8,116,9,161,1,1,0,100,1,83,0,41,2, - 122,41,73,110,115,116,97,108,108,32,116,104,101,32,112,97, - 116,104,45,98,97,115,101,100,32,105,109,112,111,114,116,32, - 99,111,109,112,111,110,101,110,116,115,46,78,41,10,114,82, - 1,0,0,114,184,0,0,0,114,1,0,0,0,114,43,1, - 0,0,114,167,0,0,0,114,54,1,0,0,114,70,1,0, - 0,218,9,109,101,116,97,95,112,97,116,104,114,186,0,0, - 
0,114,37,1,0,0,41,2,114,81,1,0,0,90,17,115, - 117,112,112,111,114,116,101,100,95,108,111,97,100,101,114,115, - 114,5,0,0,0,114,5,0,0,0,114,8,0,0,0,218, - 8,95,105,110,115,116,97,108,108,74,6,0,0,115,8,0, - 0,0,0,2,8,1,6,1,20,1,114,84,1,0,0,41, - 1,114,60,0,0,0,41,1,78,41,3,78,78,78,41,2, - 114,73,0,0,0,114,73,0,0,0,41,1,84,41,1,78, - 41,1,78,41,63,114,127,0,0,0,114,13,0,0,0,90, - 37,95,67,65,83,69,95,73,78,83,69,78,83,73,84,73, - 86,69,95,80,76,65,84,70,79,82,77,83,95,66,89,84, - 69,83,95,75,69,89,114,12,0,0,0,114,14,0,0,0, - 114,21,0,0,0,114,27,0,0,0,114,29,0,0,0,114, - 38,0,0,0,114,47,0,0,0,114,49,0,0,0,114,53, - 0,0,0,114,54,0,0,0,114,56,0,0,0,114,59,0, - 0,0,114,69,0,0,0,218,4,116,121,112,101,218,8,95, - 95,99,111,100,101,95,95,114,162,0,0,0,114,19,0,0, - 0,114,148,0,0,0,114,18,0,0,0,114,24,0,0,0, - 114,236,0,0,0,114,91,0,0,0,114,87,0,0,0,114, - 101,0,0,0,114,88,0,0,0,90,23,68,69,66,85,71, - 95,66,89,84,69,67,79,68,69,95,83,85,70,70,73,88, - 69,83,90,27,79,80,84,73,77,73,90,69,68,95,66,89, - 84,69,67,79,68,69,95,83,85,70,70,73,88,69,83,114, - 97,0,0,0,114,102,0,0,0,114,108,0,0,0,114,112, - 0,0,0,114,114,0,0,0,114,136,0,0,0,114,143,0, - 0,0,114,152,0,0,0,114,156,0,0,0,114,158,0,0, - 0,114,165,0,0,0,114,170,0,0,0,114,171,0,0,0, - 114,176,0,0,0,218,6,111,98,106,101,99,116,114,185,0, - 0,0,114,190,0,0,0,114,191,0,0,0,114,208,0,0, - 0,114,221,0,0,0,114,239,0,0,0,114,255,0,0,0, - 114,5,1,0,0,114,12,1,0,0,114,252,0,0,0,114, - 13,1,0,0,114,35,1,0,0,114,37,1,0,0,114,54, - 1,0,0,114,75,1,0,0,114,184,0,0,0,114,82,1, - 0,0,114,84,1,0,0,114,5,0,0,0,114,5,0,0, - 0,114,5,0,0,0,114,8,0,0,0,218,8,60,109,111, - 100,117,108,101,62,1,0,0,0,115,126,0,0,0,4,22, - 4,1,4,1,2,1,2,255,4,4,8,17,8,5,8,5, - 8,6,8,6,8,12,8,10,8,9,8,5,8,7,8,9, - 10,22,10,127,0,20,16,1,12,2,4,1,4,2,6,2, - 6,2,8,2,16,71,8,40,8,19,8,12,8,12,8,28, - 8,17,8,33,8,28,8,24,10,13,10,10,10,11,8,14, - 6,3,4,1,2,255,12,68,14,64,14,29,16,127,0,17, - 14,50,18,45,18,26,4,3,18,53,14,63,14,42,14,127, - 0,20,14,127,0,22,10,23,8,11,8,57, + 0,0,0,218,8,60,109,111,100,117,108,101,62,1,0,0, + 0,115,126,0,0,0,4,22,4,1,4,1,2,1,2,255, + 4,4,8,17,8,5,8,5,8,6,8,6,8,12,8,10, + 8,9,8,5,8,7,8,9,10,22,10,127,0,20,16,1, + 12,2,4,1,4,2,6,2,6,2,8,2,16,71,8,40, + 8,19,8,12,8,12,8,28,8,17,8,33,8,28,8,24, + 10,13,10,10,10,11,8,14,6,3,4,1,2,255,12,68, + 14,64,14,29,16,127,0,17,14,50,18,45,18,26,4,3, + 18,53,14,63,14,42,14,127,0,20,14,127,0,22,10,23, + 8,11,8,57, }; From webhook-mailer at python.org Sat Jun 27 20:33:56 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: Sun, 28 Jun 2020 00:33:56 -0000 Subject: [Python-checkins] bpo-35975: Only use cf_feature_version if PyCF_ONLY_AST in cf_flags (#21021) Message-ID: https://github.com/python/cpython/commit/9d197c7d48147a9ea2f7f7be917f35514a16524b commit: 9d197c7d48147a9ea2f7f7be917f35514a16524b branch: master author: Guido van Rossum committer: GitHub date: 2020-06-27T17:33:49-07:00 summary: bpo-35975: Only use cf_feature_version if PyCF_ONLY_AST in cf_flags (#21021) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-00-44.bpo-35975.UDHCHp.rst M Lib/test/test_capi.py M Modules/_testcapimodule.c M Parser/pegen.c diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 73e167a0b05a5..fa5ca1c97f458 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -627,6 +627,27 @@ def test_subinterps(self): self.assertNotEqual(pickle.load(f), id(sys.modules)) self.assertNotEqual(pickle.load(f), id(builtins)) + def test_subinterps_recent_language_features(self): + r, w = os.pipe() + code = """if 1: 
+ import pickle + with open({:d}, "wb") as f: + + @(lambda x:x) # Py 3.9 + def noop(x): return x + + a = (b := f'1{{2}}3') + noop('x') # Py 3.8 (:=) / 3.6 (f'') + + async def foo(arg): return await arg # Py 3.5 + + pickle.dump(dict(a=a, b=b), f) + """.format(w) + + with open(r, "rb") as f: + ret = support.run_in_subinterp(code) + self.assertEqual(ret, 0) + self.assertEqual(pickle.load(f), {'a': '123x', 'b': '123'}) + def test_mutate_exception(self): """ Exceptions saved in global module state get shared between diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-00-44.bpo-35975.UDHCHp.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-00-44.bpo-35975.UDHCHp.rst new file mode 100644 index 0000000000000..73f4a6da2e5c0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-00-44.bpo-35975.UDHCHp.rst @@ -0,0 +1,3 @@ +Stefan Behnel reported that cf_feature_version is used even when +PyCF_ONLY_AST is not set. This is against the intention and against the +documented behavior, so it's been fixed. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index adc5877c48a24..aafbc392a4846 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -3468,6 +3468,8 @@ run_in_subinterp(PyObject *self, PyObject *args) const char *code; int r; PyThreadState *substate, *mainstate; + /* only initialise 'cflags.cf_flags' to test backwards compatibility */ + PyCompilerFlags cflags = {0}; if (!PyArg_ParseTuple(args, "s:run_in_subinterp", &code)) @@ -3486,7 +3488,7 @@ run_in_subinterp(PyObject *self, PyObject *args) PyErr_SetString(PyExc_RuntimeError, "sub-interpreter creation failed"); return NULL; } - r = PyRun_SimpleString(code); + r = PyRun_SimpleStringFlags(code, &cflags); Py_EndInterpreter(substate); PyThreadState_Swap(mainstate); diff --git a/Parser/pegen.c b/Parser/pegen.c index 19762b06d3caf..53e3d49138306 100644 --- a/Parser/pegen.c +++ b/Parser/pegen.c @@ -1042,7 +1042,7 @@ compute_parser_flags(PyCompilerFlags *flags) if (flags->cf_flags & PyCF_TYPE_COMMENTS) { parser_flags |= PyPARSE_TYPE_COMMENTS; } - if (flags->cf_feature_version < 7) { + if ((flags->cf_flags & PyCF_ONLY_AST) && flags->cf_feature_version < 7) { parser_flags |= PyPARSE_ASYNC_HACKS; } return parser_flags; @@ -1215,7 +1215,8 @@ _PyPegen_run_parser_from_string(const char *str, int start_rule, PyObject *filen mod_ty result = NULL; int parser_flags = compute_parser_flags(flags); - int feature_version = flags ? flags->cf_feature_version : PY_MINOR_VERSION; + int feature_version = flags && (flags->cf_flags & PyCF_ONLY_AST) ? 
+ flags->cf_feature_version : PY_MINOR_VERSION; Parser *p = _PyPegen_Parser_New(tok, start_rule, parser_flags, feature_version, NULL, arena); if (p == NULL) { From webhook-mailer at python.org Sat Jun 27 20:35:09 2020 From: webhook-mailer at python.org (Guido van Rossum) Date: Sun, 28 Jun 2020 00:35:09 -0000 Subject: [Python-checkins] [3.8] bpo-35975: Only use cf_feature_version if PyCF_ONLY_AST in cf_flags (#21023) Message-ID: https://github.com/python/cpython/commit/e653369e76d7da6bcbcf1f09a141f47fb77df6c3 commit: e653369e76d7da6bcbcf1f09a141f47fb77df6c3 branch: 3.8 author: Guido van Rossum committer: GitHub date: 2020-06-27T17:35:05-07:00 summary: [3.8] bpo-35975: Only use cf_feature_version if PyCF_ONLY_AST in cf_flags (#21023) files: A Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp.rst M Lib/test/test_capi.py M Modules/_testcapimodule.c M Python/ast.c M Python/pythonrun.c diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py index 584c104645031..d1506bc17732f 100644 --- a/Lib/test/test_capi.py +++ b/Lib/test/test_capi.py @@ -584,6 +584,26 @@ def test_subinterps(self): self.assertNotEqual(pickle.load(f), id(sys.modules)) self.assertNotEqual(pickle.load(f), id(builtins)) + def test_subinterps_recent_language_features(self): + r, w = os.pipe() + code = """if 1: + import pickle + with open({:d}, "wb") as f: + + def noop(x): return x + + a = (b := f'1{{2}}3') + noop('x') # Py 3.8 (:=) / 3.6 (f'') + + async def foo(arg): return await arg # Py 3.5 + + pickle.dump(dict(a=a, b=b), f) + """.format(w) + + with open(r, "rb") as f: + ret = support.run_in_subinterp(code) + self.assertEqual(ret, 0) + self.assertEqual(pickle.load(f), {'a': '123x', 'b': '123'}) + def test_mutate_exception(self): """ Exceptions saved in global module state get shared between diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp.rst new file mode 100644 index 0000000000000..73f4a6da2e5c0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp.rst @@ -0,0 +1,3 @@ +Stefan Behnel reported that cf_feature_version is used even when +PyCF_ONLY_AST is not set. This is against the intention and against the +documented behavior, so it's been fixed. diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index da3579c2cc6fd..f74756163f863 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -3345,6 +3345,8 @@ run_in_subinterp(PyObject *self, PyObject *args) const char *code; int r; PyThreadState *substate, *mainstate; + /* only initialise 'cflags.cf_flags' to test backwards compatibility */ + PyCompilerFlags cflags = {0}; if (!PyArg_ParseTuple(args, "s:run_in_subinterp", &code)) @@ -3363,7 +3365,7 @@ run_in_subinterp(PyObject *self, PyObject *args) PyErr_SetString(PyExc_RuntimeError, "sub-interpreter creation failed"); return NULL; } - r = PyRun_SimpleString(code); + r = PyRun_SimpleStringFlags(code, &cflags); Py_EndInterpreter(substate); PyThreadState_Swap(mainstate); diff --git a/Python/ast.c b/Python/ast.c index 5efb690c299ca..7c1d24dea7184 100644 --- a/Python/ast.c +++ b/Python/ast.c @@ -808,7 +808,8 @@ PyAST_FromNodeObject(const node *n, PyCompilerFlags *flags, /* borrowed reference */ c.c_filename = filename; c.c_normalize = NULL; - c.c_feature_version = flags ? flags->cf_feature_version : PY_MINOR_VERSION; + c.c_feature_version = flags && (flags->cf_flags & PyCF_ONLY_AST) ? 
+ flags->cf_feature_version : PY_MINOR_VERSION; if (TYPE(n) == encoding_decl) n = CHILD(n, 0); diff --git a/Python/pythonrun.c b/Python/pythonrun.c index a7da143077a7a..6cdd8ea7a6ab1 100644 --- a/Python/pythonrun.c +++ b/Python/pythonrun.c @@ -1337,7 +1337,7 @@ PyParser_ASTFromStringObject(const char *s, PyObject *filename, int start, PyCompilerFlags localflags = _PyCompilerFlags_INIT; perrdetail err; int iflags = PARSER_FLAGS(flags); - if (flags && flags->cf_feature_version < 7) + if (flags && (flags->cf_flags & PyCF_ONLY_AST) && flags->cf_feature_version < 7) iflags |= PyPARSE_ASYNC_HACKS; node *n = PyParser_ParseStringObject(s, filename, From webhook-mailer at python.org Sat Jun 27 20:55:54 2020 From: webhook-mailer at python.org (Pablo Galindo) Date: Sun, 28 Jun 2020 00:55:54 -0000 Subject: [Python-checkins] bpo-39151: Simplify DFS in the assembler (GH-17733) Message-ID: https://github.com/python/cpython/commit/60eb9f1ab59a59ddf81d3da3513cfa3251169b5c commit: 60eb9f1ab59a59ddf81d3da3513cfa3251169b5c branch: master author: Pablo Galindo committer: GitHub date: 2020-06-28T01:55:47+01:00 summary: bpo-39151: Simplify DFS in the assembler (GH-17733) files: M Python/compile.c diff --git a/Python/compile.c b/Python/compile.c index 8fe82f91559e0..4a9b511961e5e 100644 --- a/Python/compile.c +++ b/Python/compile.c @@ -67,8 +67,6 @@ typedef struct basicblock_ { /* If b_next is non-NULL, it is a pointer to the next block reached by normal control flow. */ struct basicblock_ *b_next; - /* b_seen is used to perform a DFS of basicblocks. */ - unsigned b_seen : 1; /* b_return is true if a RETURN_VALUE opcode is inserted. */ unsigned b_return : 1; /* depth of stack upon entry of block, computed by stackdepth() */ @@ -5413,7 +5411,7 @@ struct assembler { PyObject *a_bytecode; /* string containing bytecode */ int a_offset; /* offset into bytecode */ int a_nblocks; /* number of reachable blocks */ - basicblock **a_postorder; /* list of blocks in dfs postorder */ + basicblock **a_reverse_postorder; /* list of blocks in dfs postorder */ PyObject *a_lnotab; /* string containing lnotab */ int a_lnotab_off; /* offset into lnotab */ int a_lineno; /* last lineno of emitted instruction */ @@ -5423,26 +5421,14 @@ struct assembler { static void dfs(struct compiler *c, basicblock *b, struct assembler *a, int end) { - int i, j; - /* Get rid of recursion for normal control flow. - Since the number of blocks is limited, unused space in a_postorder - (from a_nblocks to end) can be used as a stack for still not ordered - blocks. */ - for (j = end; b && !b->b_seen; b = b->b_next) { - b->b_seen = 1; - assert(a->a_nblocks < j); - a->a_postorder[--j] = b; - } - while (j < end) { - b = a->a_postorder[j++]; - for (i = 0; i < b->b_iused; i++) { - struct instr *instr = &b->b_instr[i]; - if (instr->i_jrel || instr->i_jabs) - dfs(c, instr->i_target, a, j); - } - assert(a->a_nblocks < j); - a->a_postorder[a->a_nblocks++] = b; + /* There is no real depth-first-search to do here because all the + * blocks are emitted in topological order already, so we just need to + * follow the b_next pointers and place them in a->a_reverse_postorder in + * reverse order and make sure that the first one starts at 0. 
*/ + + for (a->a_nblocks = 0; b != NULL; b = b->b_next) { + a->a_reverse_postorder[a->a_nblocks++] = b; } } @@ -5543,9 +5529,9 @@ assemble_init(struct assembler *a, int nblocks, int firstlineno) PyErr_NoMemory(); return 0; } - a->a_postorder = (basicblock **)PyObject_Malloc( + a->a_reverse_postorder = (basicblock **)PyObject_Malloc( sizeof(basicblock *) * nblocks); - if (!a->a_postorder) { + if (!a->a_reverse_postorder) { PyErr_NoMemory(); return 0; } @@ -5557,8 +5543,8 @@ assemble_free(struct assembler *a) { Py_XDECREF(a->a_bytecode); Py_XDECREF(a->a_lnotab); - if (a->a_postorder) - PyObject_Free(a->a_postorder); + if (a->a_reverse_postorder) + PyObject_Free(a->a_reverse_postorder); } static int @@ -5719,8 +5705,8 @@ assemble_jump_offsets(struct assembler *a, struct compiler *c) Replace block pointer with position in bytecode. */ do { totsize = 0; - for (i = a->a_nblocks - 1; i >= 0; i--) { - b = a->a_postorder[i]; + for (i = 0; i < a->a_nblocks; i++) { + b = a->a_reverse_postorder[i]; bsize = blocksize(b); b->b_offset = totsize; totsize += bsize; @@ -5975,10 +5961,9 @@ dump_instr(const struct instr *i) static void dump_basicblock(const basicblock *b) { - const char *seen = b->b_seen ? "seen " : ""; const char *b_return = b->b_return ? "return " : ""; - fprintf(stderr, "used: %d, depth: %d, offset: %d %s%s\n", - b->b_iused, b->b_startdepth, b->b_offset, seen, b_return); + fprintf(stderr, "used: %d, depth: %d, offset: %d %s\n", + b->b_iused, b->b_startdepth, b->b_offset, b_return); if (b->b_instr) { int i; for (i = 0; i < b->b_iused; i++) { @@ -6030,8 +6015,8 @@ assemble(struct compiler *c, int addNone) assemble_jump_offsets(&a, c); /* Emit code in reverse postorder from dfs. */ - for (i = a.a_nblocks - 1; i >= 0; i--) { - b = a.a_postorder[i]; + for (i = 0; i < a.a_nblocks; i++) { + b = a.a_reverse_postorder[i]; for (j = 0; j < b->b_iused; j++) if (!assemble_emit(&a, &b->b_instr[j])) goto error; From webhook-mailer at python.org Sat Jun 27 21:11:47 2020 From: webhook-mailer at python.org (Batuhan Taskaya) Date: Sun, 28 Jun 2020 01:11:47 -0000 Subject: [Python-checkins] bpo-38870: Extend subject of ast.unparse warnings (GH-21053) Message-ID: https://github.com/python/cpython/commit/8df1016e2ef8c0a9f4d15bf7894c284295c99d9f commit: 8df1016e2ef8c0a9f4d15bf7894c284295c99d9f branch: master author: Batuhan Taskaya committer: GitHub date: 2020-06-28T02:11:43+01:00 summary: bpo-38870: Extend subject of ast.unparse warnings (GH-21053) - Mention that some compiler optimizations might not roundtrip exactly (such as constant tuples and frozensets). - Add a warning about it might raise RecursionError on very complex expressions due to recursive unparsing aspect of ast.unparse files: M Doc/library/ast.rst diff --git a/Doc/library/ast.rst b/Doc/library/ast.rst index 6c6ad01b842c8..25cb17811e718 100644 --- a/Doc/library/ast.rst +++ b/Doc/library/ast.rst @@ -1553,7 +1553,12 @@ and classes for traversing abstract syntax trees: .. warning:: The produced code string will not necessarily be equal to the original - code that generated the :class:`ast.AST` object. + code that generated the :class:`ast.AST` object (without any compiler + optimizations, such as constant tuples/frozensets). + + .. warning:: + Trying to unparse a highly complex expression would result with + :exc:`RecursionError`. .. 
versionadded:: 3.9 From webhook-mailer at python.org Sun Jun 28 02:02:55 2020 From: webhook-mailer at python.org (E-Paine) Date: Sun, 28 Jun 2020 06:02:55 -0000 Subject: [Python-checkins] bpo-41144: Fix IDLE open module error (#21182) Message-ID: https://github.com/python/cpython/commit/8ab77c6f9fb6ef86af8f6b8722a2fcb37438edd0 commit: 8ab77c6f9fb6ef86af8f6b8722a2fcb37438edd0 branch: master author: E-Paine <63801254+E-Paine at users.noreply.github.com> committer: GitHub date: 2020-06-28T02:02:47-04:00 summary: bpo-41144: Fix IDLE open module error (#21182) Could not open os.path. Co-authored-by: Terry Jan Reedy files: A Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_query.py M Lib/idlelib/query.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 7982afa7d1f67..c270fcbae2bd1 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,8 @@ Released on 2020-10-05? ====================================== +bpo-41144: Make Open Module open a special module such as os.path. + bpo-40723: Make test_idle pass when run after import. Patch by Florian Dahlitz. diff --git a/Lib/idlelib/idle_test/test_query.py b/Lib/idlelib/idle_test/test_query.py index 6d026cb532068..e968862688b95 100644 --- a/Lib/idlelib/idle_test/test_query.py +++ b/Lib/idlelib/idle_test/test_query.py @@ -136,6 +136,9 @@ def test_good_module_name(self): dialog = self.Dummy_ModuleName('idlelib') self.assertTrue(dialog.entry_ok().endswith('__init__.py')) self.assertEqual(dialog.entry_error['text'], '') + dialog = self.Dummy_ModuleName('os.path') + self.assertTrue(dialog.entry_ok().endswith('path.py')) + self.assertEqual(dialog.entry_error['text'], '') class GotoTest(unittest.TestCase): diff --git a/Lib/idlelib/query.py b/Lib/idlelib/query.py index 2a88530b4d082..015fc7ade459d 100644 --- a/Lib/idlelib/query.py +++ b/Lib/idlelib/query.py @@ -19,7 +19,7 @@ # HelpSource was extracted from configHelpSourceEdit.py (temporarily # config_help.py), with darwin code moved from ok to path_ok. -import importlib +import importlib.util, importlib.abc import os import shlex from sys import executable, platform # Platform is set for one test. @@ -57,7 +57,8 @@ def __init__(self, parent, title, message, *, text0='', used_names={}, self.withdraw() # Hide while configuring, especially geometry. self.title(title) self.transient(parent) - self.grab_set() + if not _utest: # Otherwise fail when directly run unittest. + self.grab_set() windowingsystem = self.tk.call('tk', 'windowingsystem') if windowingsystem == 'aqua': @@ -209,17 +210,23 @@ def entry_ok(self): self.showerror(str(msg)) return None if spec is None: - self.showerror("module not found") + self.showerror("module not found.") return None if not isinstance(spec.loader, importlib.abc.SourceLoader): - self.showerror("not a source-based module") + self.showerror("not a source-based module.") return None try: file_path = spec.loader.get_filename(name) except AttributeError: - self.showerror("loader does not support get_filename", - parent=self) + self.showerror("loader does not support get_filename.") return None + except ImportError: + # Some special modules require this (e.g. 
os.path) + try: + file_path = spec.loader.get_filename() + except TypeError: + self.showerror("loader failed to get filename.") + return None return file_path @@ -375,7 +382,7 @@ def cli_args_ok(self): return cli_args def entry_ok(self): - "Return apparently valid (cli_args, restart) or None" + "Return apparently valid (cli_args, restart) or None." cli_args = self.cli_args_ok() restart = self.restartvar.get() return None if cli_args is None else (cli_args, restart) diff --git a/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst b/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst new file mode 100644 index 0000000000000..ed558d3e7ded1 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst @@ -0,0 +1 @@ +Make Open Module open a special module such as os.path. From webhook-mailer at python.org Sun Jun 28 02:20:20 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Sun, 28 Jun 2020 06:20:20 -0000 Subject: [Python-checkins] bpo-41144: Fix IDLE open module error (GH-21182) Message-ID: https://github.com/python/cpython/commit/86ef6fe2b64360a1a55a913a09b12f0a80e8c06d commit: 86ef6fe2b64360a1a55a913a09b12f0a80e8c06d branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-27T23:20:13-07:00 summary: bpo-41144: Fix IDLE open module error (GH-21182) Could not open os.path. Co-authored-by: Terry Jan Reedy (cherry picked from commit 8ab77c6f9fb6ef86af8f6b8722a2fcb37438edd0) Co-authored-by: E-Paine <63801254+E-Paine at users.noreply.github.com> files: A Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_query.py M Lib/idlelib/query.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index edd00d4cdac1e..584fd4631fbc2 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,8 @@ Released on 2020-07-03? ====================================== +bpo-41144: Make Open Module open a special module such as os.path. + bpo-40723: Make test_idle pass when run after import. Patch by Florian Dahlitz. diff --git a/Lib/idlelib/idle_test/test_query.py b/Lib/idlelib/idle_test/test_query.py index 6d026cb532068..e968862688b95 100644 --- a/Lib/idlelib/idle_test/test_query.py +++ b/Lib/idlelib/idle_test/test_query.py @@ -136,6 +136,9 @@ def test_good_module_name(self): dialog = self.Dummy_ModuleName('idlelib') self.assertTrue(dialog.entry_ok().endswith('__init__.py')) self.assertEqual(dialog.entry_error['text'], '') + dialog = self.Dummy_ModuleName('os.path') + self.assertTrue(dialog.entry_ok().endswith('path.py')) + self.assertEqual(dialog.entry_error['text'], '') class GotoTest(unittest.TestCase): diff --git a/Lib/idlelib/query.py b/Lib/idlelib/query.py index 2a88530b4d082..015fc7ade459d 100644 --- a/Lib/idlelib/query.py +++ b/Lib/idlelib/query.py @@ -19,7 +19,7 @@ # HelpSource was extracted from configHelpSourceEdit.py (temporarily # config_help.py), with darwin code moved from ok to path_ok. -import importlib +import importlib.util, importlib.abc import os import shlex from sys import executable, platform # Platform is set for one test. @@ -57,7 +57,8 @@ def __init__(self, parent, title, message, *, text0='', used_names={}, self.withdraw() # Hide while configuring, especially geometry. self.title(title) self.transient(parent) - self.grab_set() + if not _utest: # Otherwise fail when directly run unittest. 
+ self.grab_set() windowingsystem = self.tk.call('tk', 'windowingsystem') if windowingsystem == 'aqua': @@ -209,17 +210,23 @@ def entry_ok(self): self.showerror(str(msg)) return None if spec is None: - self.showerror("module not found") + self.showerror("module not found.") return None if not isinstance(spec.loader, importlib.abc.SourceLoader): - self.showerror("not a source-based module") + self.showerror("not a source-based module.") return None try: file_path = spec.loader.get_filename(name) except AttributeError: - self.showerror("loader does not support get_filename", - parent=self) + self.showerror("loader does not support get_filename.") return None + except ImportError: + # Some special modules require this (e.g. os.path) + try: + file_path = spec.loader.get_filename() + except TypeError: + self.showerror("loader failed to get filename.") + return None return file_path @@ -375,7 +382,7 @@ def cli_args_ok(self): return cli_args def entry_ok(self): - "Return apparently valid (cli_args, restart) or None" + "Return apparently valid (cli_args, restart) or None." cli_args = self.cli_args_ok() restart = self.restartvar.get() return None if cli_args is None else (cli_args, restart) diff --git a/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst b/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst new file mode 100644 index 0000000000000..ed558d3e7ded1 --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst @@ -0,0 +1 @@ +Make Open Module open a special module such as os.path. From webhook-mailer at python.org Sun Jun 28 02:41:02 2020 From: webhook-mailer at python.org (Zackery Spytz) Date: Sun, 28 Jun 2020 06:41:02 -0000 Subject: [Python-checkins] bpo-31082: Use "iterable" in the docstring for functools.reduce() (GH-20796) Message-ID: https://github.com/python/cpython/commit/cd3c2bdd5d53db7fe1d546543d32000070916552 commit: cd3c2bdd5d53db7fe1d546543d32000070916552 branch: master author: Zackery Spytz committer: GitHub date: 2020-06-28T15:40:54+09:00 summary: bpo-31082: Use "iterable" in the docstring for functools.reduce() (GH-20796) files: A Misc/NEWS.d/next/Library/2020-06-25-10-11-47.bpo-31082.HsgDkx.rst M Lib/functools.py M Modules/_functoolsmodule.c diff --git a/Lib/functools.py b/Lib/functools.py index 5cab497d26403..b1f1fe8d9a6f2 100644 --- a/Lib/functools.py +++ b/Lib/functools.py @@ -236,14 +236,14 @@ def __ge__(self, other): def reduce(function, sequence, initial=_initial_missing): """ - reduce(function, sequence[, initial]) -> value + reduce(function, iterable[, initial]) -> value - Apply a function of two arguments cumulatively to the items of a sequence, - from left to right, so as to reduce the sequence to a single value. - For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates + Apply a function of two arguments cumulatively to the items of a sequence + or iterable, from left to right, so as to reduce the iterable to a single + value. For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates ((((1+2)+3)+4)+5). If initial is present, it is placed before the items - of the sequence in the calculation, and serves as a default when the - sequence is empty. + of the iterable in the calculation, and serves as a default when the + iterable is empty. 
""" it = iter(sequence) @@ -252,7 +252,8 @@ def reduce(function, sequence, initial=_initial_missing): try: value = next(it) except StopIteration: - raise TypeError("reduce() of empty sequence with no initial value") from None + raise TypeError( + "reduce() of empty iterable with no initial value") from None else: value = initial diff --git a/Misc/NEWS.d/next/Library/2020-06-25-10-11-47.bpo-31082.HsgDkx.rst b/Misc/NEWS.d/next/Library/2020-06-25-10-11-47.bpo-31082.HsgDkx.rst new file mode 100644 index 0000000000000..9746d33a49638 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-25-10-11-47.bpo-31082.HsgDkx.rst @@ -0,0 +1 @@ +Use the term "iterable" in the docstring for :func:`functools.reduce`. diff --git a/Modules/_functoolsmodule.c b/Modules/_functoolsmodule.c index 8120140afac05..bb86fe862da6d 100644 --- a/Modules/_functoolsmodule.c +++ b/Modules/_functoolsmodule.c @@ -679,7 +679,7 @@ functools_reduce(PyObject *self, PyObject *args) if (result == NULL) PyErr_SetString(PyExc_TypeError, - "reduce() of empty sequence with no initial value"); + "reduce() of empty iterable with no initial value"); Py_DECREF(it); return result; @@ -692,14 +692,14 @@ functools_reduce(PyObject *self, PyObject *args) } PyDoc_STRVAR(functools_reduce_doc, -"reduce(function, sequence[, initial]) -> value\n\ +"reduce(function, iterable[, initial]) -> value\n\ \n\ -Apply a function of two arguments cumulatively to the items of a sequence,\n\ -from left to right, so as to reduce the sequence to a single value.\n\ -For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates\n\ +Apply a function of two arguments cumulatively to the items of a sequence\n\ +or iterable, from left to right, so as to reduce the iterable to a single\n\ +value. For example, reduce(lambda x, y: x+y, [1, 2, 3, 4, 5]) calculates\n\ ((((1+2)+3)+4)+5). If initial is present, it is placed before the items\n\ -of the sequence in the calculation, and serves as a default when the\n\ -sequence is empty."); +of the iterable in the calculation, and serves as a default when the\n\ +iterable is empty."); /* lru_cache object **********************************************************/ From webhook-mailer at python.org Sun Jun 28 06:34:30 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 28 Jun 2020 10:34:30 -0000 Subject: [Python-checkins] bpo-41138: Fix trace CLI for non-UTF-8 files. (GH-21177) Message-ID: https://github.com/python/cpython/commit/04cdeb7a5617c48102f45b965e683b12cdf934f8 commit: 04cdeb7a5617c48102f45b965e683b12cdf934f8 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-28T13:34:22+03:00 summary: bpo-41138: Fix trace CLI for non-UTF-8 files. (GH-21177) Fix also a resource warning when store counts and module info. 
files: A Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst M Lib/test/test_trace.py M Lib/trace.py diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index 89d46376bff35..c03982ba72b3f 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -1,6 +1,6 @@ import os import sys -from test.support import TESTFN, rmtree, unlink, captured_stdout +from test.support import TESTFN, TESTFN_UNICODE, FS_NONASCII, rmtree, unlink, captured_stdout from test.support.script_helper import assert_python_ok, assert_python_failure import textwrap import unittest @@ -428,9 +428,10 @@ class TestCoverageCommandLineOutput(unittest.TestCase): coverfile = 'tmp.cover' def setUp(self): - with open(self.codefile, 'w') as f: + with open(self.codefile, 'w', encoding='iso-8859-15') as f: f.write(textwrap.dedent('''\ - x = 42 + # coding: iso-8859-15 + x = 'sp?m' if []: print('unreachable') ''')) @@ -451,9 +452,10 @@ def test_cover_files_written_no_highlight(self): self.assertEqual(stderr, b'') self.assertFalse(os.path.exists(tracecoverpath)) self.assertTrue(os.path.exists(self.coverfile)) - with open(self.coverfile) as f: + with open(self.coverfile, encoding='iso-8859-15') as f: self.assertEqual(f.read(), - " 1: x = 42\n" + " # coding: iso-8859-15\n" + " 1: x = 'sp?m'\n" " 1: if []:\n" " print('unreachable')\n" ) @@ -462,9 +464,10 @@ def test_cover_files_written_with_highlight(self): argv = '-m trace --count --missing'.split() + [self.codefile] status, stdout, stderr = assert_python_ok(*argv) self.assertTrue(os.path.exists(self.coverfile)) - with open(self.coverfile) as f: + with open(self.coverfile, encoding='iso-8859-15') as f: self.assertEqual(f.read(), textwrap.dedent('''\ - 1: x = 42 + # coding: iso-8859-15 + 1: x = 'sp?m' 1: if []: >>>>>> print('unreachable') ''')) @@ -485,15 +488,19 @@ def test_failures(self): self.assertIn(message, stderr) def test_listfuncs_flag_success(self): - with open(TESTFN, 'w') as fd: - self.addCleanup(unlink, TESTFN) + filename = TESTFN + '.py' + modulename = os.path.basename(TESTFN) + with open(filename, 'w', encoding='utf-8') as fd: + self.addCleanup(unlink, filename) fd.write("a = 1\n") - status, stdout, stderr = assert_python_ok('-m', 'trace', '-l', TESTFN, + status, stdout, stderr = assert_python_ok('-m', 'trace', '-l', filename, PYTHONIOENCODING='utf-8') self.assertIn(b'functions called:', stdout) + expected = f'filename: {filename}, modulename: {modulename}, funcname: ' + self.assertIn(expected.encode(), stdout) def test_sys_argv_list(self): - with open(TESTFN, 'w') as fd: + with open(TESTFN, 'w', encoding='utf-8') as fd: self.addCleanup(unlink, TESTFN) fd.write("import sys\n") fd.write("print(type(sys.argv))\n") @@ -506,7 +513,8 @@ def test_sys_argv_list(self): def test_count_and_summary(self): filename = f'{TESTFN}.py' coverfilename = f'{TESTFN}.cover' - with open(filename, 'w') as fd: + modulename = os.path.basename(TESTFN) + with open(filename, 'w', encoding='utf-8') as fd: self.addCleanup(unlink, filename) self.addCleanup(unlink, coverfilename) fd.write(textwrap.dedent("""\ @@ -524,7 +532,7 @@ def f(): stdout = stdout.decode() self.assertEqual(status, 0) self.assertIn('lines cov% module (path)', stdout) - self.assertIn(f'6 100% {TESTFN} ({filename})', stdout) + self.assertIn(f'6 100% {modulename} ({filename})', stdout) def test_run_as_module(self): assert_python_ok('-m', 'trace', '-l', '--module', 'timeit', '-n', '1') diff --git a/Lib/trace.py b/Lib/trace.py index 52047c3fbf473..c505d8bc72a98 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ 
-287,8 +287,9 @@ def write_results(self, show_missing=True, summary=False, coverdir=None): if self.outfile: # try and store counts and module info into self.outfile try: - pickle.dump((self.counts, self.calledfuncs, self.callers), - open(self.outfile, 'wb'), 1) + with open(self.outfile, 'wb') as f: + pickle.dump((self.counts, self.calledfuncs, self.callers), + f, 1) except OSError as err: print("Can't save counts files because %s" % err, file=sys.stderr) @@ -715,7 +716,7 @@ def parse_ignore_dir(s): sys.argv = [opts.progname, *opts.arguments] sys.path[0] = os.path.dirname(opts.progname) - with open(opts.progname) as fp: + with open(opts.progname, 'rb') as fp: code = compile(fp.read(), opts.progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { diff --git a/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst b/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst new file mode 100644 index 0000000000000..839d430e89b66 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst @@ -0,0 +1,2 @@ +Fixed the :mod:`trace` module CLI for Python source files with non-UTF-8 +encoding. From webhook-mailer at python.org Sun Jun 28 08:41:21 2020 From: webhook-mailer at python.org (Ned Deily) Date: Sun, 28 Jun 2020 12:41:21 -0000 Subject: [Python-checkins] 3.7.8 Message-ID: https://github.com/python/cpython/commit/4b47a5b6ba66b02df9392feb97b8ead916f8c1fa commit: 4b47a5b6ba66b02df9392feb97b8ead916f8c1fa branch: 3.7 author: Ned Deily committer: Ned Deily date: 2020-06-27T04:35:53-04:00 summary: 3.7.8 files: A Misc/NEWS.d/3.7.8.rst D Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst D Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst M Include/patchlevel.h M README.rst diff --git a/Include/patchlevel.h b/Include/patchlevel.h index 17d537092b483..c1bdb834d63ba 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -19,11 +19,11 @@ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 7 #define PY_MICRO_VERSION 8 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA -#define PY_RELEASE_SERIAL 1 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL +#define PY_RELEASE_SERIAL 0 /* Version as a string */ -#define PY_VERSION "3.7.8rc1+" +#define PY_VERSION "3.7.8" /*--end constants--*/ /* Version as a single 4-byte hex number, e.g. 0x010502B2 == 1.5.2b2. diff --git a/Misc/NEWS.d/3.7.8.rst b/Misc/NEWS.d/3.7.8.rst new file mode 100644 index 0000000000000..678d1c0199602 --- /dev/null +++ b/Misc/NEWS.d/3.7.8.rst @@ -0,0 +1,21 @@ +.. bpo: 41009 +.. date: 2020-06-17-17-27-07 +.. nonce: Rvn6OQ +.. release date: 2020-06-27 +.. section: Tests + +Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as +class decorator. + +.. + +.. bpo: 41100 +.. date: 2020-06-24-13-51-57 +.. nonce: mcHdc5 +.. section: macOS + +Fix configure error when building on macOS 11. Note that 3.7.8 was released +shortly after the first developer preview of macOS 11 (Big Sur); there are +other known issues with building and running on the developer preview. Big +Sur is expected to be fully supported in a future bugfix release of Python +3.8.x and with 3.9.0. 
diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst deleted file mode 100644 index 1208c119a3556..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as -class decorator. diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst deleted file mode 100644 index ded66b567a92d..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst +++ /dev/null @@ -1 +0,0 @@ -Support macOS 11 when building. diff --git a/README.rst b/README.rst index 507b6c46b7ac3..557078399cff7 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.7.8 candidate 1+ -========================================= +This is Python version 3.7.8 +============================ .. image:: https://travis-ci.org/python/cpython.svg?branch=3.7 :alt: CPython build status on Travis CI From webhook-mailer at python.org Sun Jun 28 10:30:13 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Sun, 28 Jun 2020 14:30:13 -0000 Subject: [Python-checkins] [3.8] bpo-41138: Fix trace CLI for non-UTF-8 files. (GH-21177) (GH-21200) Message-ID: https://github.com/python/cpython/commit/cb53b8ce9c8660465b816f4d577720305b1283fa commit: cb53b8ce9c8660465b816f4d577720305b1283fa branch: 3.8 author: Serhiy Storchaka committer: GitHub date: 2020-06-28T17:30:08+03:00 summary: [3.8] bpo-41138: Fix trace CLI for non-UTF-8 files. (GH-21177) (GH-21200) Fix also a resource warning when store counts and module info. (cherry picked from commit 04cdeb7a5617c48102f45b965e683b12cdf934f8) files: A Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst M Lib/test/test_trace.py M Lib/trace.py diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py index 4bc21eae02ce1..8eacf99cbf46a 100644 --- a/Lib/test/test_trace.py +++ b/Lib/test/test_trace.py @@ -1,6 +1,6 @@ import os import sys -from test.support import TESTFN, rmtree, unlink, captured_stdout +from test.support import TESTFN, TESTFN_UNICODE, FS_NONASCII, rmtree, unlink, captured_stdout from test.support.script_helper import assert_python_ok, assert_python_failure import textwrap import unittest @@ -429,9 +429,10 @@ class TestCoverageCommandLineOutput(unittest.TestCase): coverfile = 'tmp.cover' def setUp(self): - with open(self.codefile, 'w') as f: + with open(self.codefile, 'w', encoding='iso-8859-15') as f: f.write(textwrap.dedent('''\ - x = 42 + # coding: iso-8859-15 + x = 'sp?m' if []: print('unreachable') ''')) @@ -452,9 +453,10 @@ def test_cover_files_written_no_highlight(self): self.assertEqual(stderr, b'') self.assertFalse(os.path.exists(tracecoverpath)) self.assertTrue(os.path.exists(self.coverfile)) - with open(self.coverfile) as f: + with open(self.coverfile, encoding='iso-8859-15') as f: self.assertEqual(f.read(), - " 1: x = 42\n" + " # coding: iso-8859-15\n" + " 1: x = 'sp?m'\n" " 1: if []:\n" " print('unreachable')\n" ) @@ -463,9 +465,10 @@ def test_cover_files_written_with_highlight(self): argv = '-m trace --count --missing'.split() + [self.codefile] status, stdout, stderr = assert_python_ok(*argv) self.assertTrue(os.path.exists(self.coverfile)) - with open(self.coverfile) as f: + with open(self.coverfile, encoding='iso-8859-15') as f: self.assertEqual(f.read(), textwrap.dedent('''\ - 1: x = 42 + # coding: iso-8859-15 + 
1: x = 'sp?m' 1: if []: >>>>>> print('unreachable') ''')) @@ -486,14 +489,19 @@ def test_failures(self): self.assertIn(message, stderr) def test_listfuncs_flag_success(self): - with open(TESTFN, 'w') as fd: - self.addCleanup(unlink, TESTFN) + filename = TESTFN + '.py' + modulename = os.path.basename(TESTFN) + with open(filename, 'w', encoding='utf-8') as fd: + self.addCleanup(unlink, filename) fd.write("a = 1\n") - status, stdout, stderr = assert_python_ok('-m', 'trace', '-l', TESTFN) + status, stdout, stderr = assert_python_ok('-m', 'trace', '-l', filename, + PYTHONIOENCODING='utf-8') self.assertIn(b'functions called:', stdout) + expected = f'filename: {filename}, modulename: {modulename}, funcname: ' + self.assertIn(expected.encode(), stdout) def test_sys_argv_list(self): - with open(TESTFN, 'w') as fd: + with open(TESTFN, 'w', encoding='utf-8') as fd: self.addCleanup(unlink, TESTFN) fd.write("import sys\n") fd.write("print(type(sys.argv))\n") @@ -505,7 +513,8 @@ def test_sys_argv_list(self): def test_count_and_summary(self): filename = f'{TESTFN}.py' coverfilename = f'{TESTFN}.cover' - with open(filename, 'w') as fd: + modulename = os.path.basename(TESTFN) + with open(filename, 'w', encoding='utf-8') as fd: self.addCleanup(unlink, filename) self.addCleanup(unlink, coverfilename) fd.write(textwrap.dedent("""\ @@ -522,7 +531,7 @@ def f(): stdout = stdout.decode() self.assertEqual(status, 0) self.assertIn('lines cov% module (path)', stdout) - self.assertIn(f'6 100% {TESTFN} ({filename})', stdout) + self.assertIn(f'6 100% {modulename} ({filename})', stdout) def test_run_as_module(self): assert_python_ok('-m', 'trace', '-l', '--module', 'timeit', '-n', '1') diff --git a/Lib/trace.py b/Lib/trace.py index a44735761df42..89f17d485f35e 100755 --- a/Lib/trace.py +++ b/Lib/trace.py @@ -287,8 +287,9 @@ def write_results(self, show_missing=True, summary=False, coverdir=None): if self.outfile: # try and store counts and module info into self.outfile try: - pickle.dump((self.counts, self.calledfuncs, self.callers), - open(self.outfile, 'wb'), 1) + with open(self.outfile, 'wb') as f: + pickle.dump((self.counts, self.calledfuncs, self.callers), + f, 1) except OSError as err: print("Can't save counts files because %s" % err, file=sys.stderr) @@ -731,7 +732,7 @@ def parse_ignore_dir(s): sys.argv = [opts.progname, *opts.arguments] sys.path[0] = os.path.dirname(opts.progname) - with open(opts.progname) as fp: + with open(opts.progname, 'rb') as fp: code = compile(fp.read(), opts.progname, 'exec') # try to emulate __main__ namespace as much as possible globs = { diff --git a/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst b/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst new file mode 100644 index 0000000000000..839d430e89b66 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst @@ -0,0 +1,2 @@ +Fixed the :mod:`trace` module CLI for Python source files with non-UTF-8 +encoding. 
From webhook-mailer at python.org Sun Jun 28 16:01:09 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Sun, 28 Jun 2020 20:01:09 -0000 Subject: [Python-checkins] bpo-40874 Update the required libmpdec version for the decimal module (GH-21202) Message-ID: https://github.com/python/cpython/commit/8bea91b5e9ea07ca93958e131b436024f0b1b1cf commit: 8bea91b5e9ea07ca93958e131b436024f0b1b1cf branch: master author: Stefan Krah committer: GitHub date: 2020-06-28T22:01:01+02:00 summary: bpo-40874 Update the required libmpdec version for the decimal module (GH-21202) files: A Misc/NEWS.d/next/Library/2020-06-28-21-16-51.bpo-40874.YImvzA.rst M Modules/_decimal/_decimal.c diff --git a/Misc/NEWS.d/next/Library/2020-06-28-21-16-51.bpo-40874.YImvzA.rst b/Misc/NEWS.d/next/Library/2020-06-28-21-16-51.bpo-40874.YImvzA.rst new file mode 100644 index 0000000000000..a43eab8f4dcdd --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-28-21-16-51.bpo-40874.YImvzA.rst @@ -0,0 +1 @@ +The decimal module now requires libmpdec-2.5.0. diff --git a/Modules/_decimal/_decimal.c b/Modules/_decimal/_decimal.c index 20ba8fb77ad44..ff7c647c2220c 100644 --- a/Modules/_decimal/_decimal.c +++ b/Modules/_decimal/_decimal.c @@ -36,8 +36,8 @@ #include "docstrings.h" -#if !defined(MPD_VERSION_HEX) || MPD_VERSION_HEX < 0x02040100 - #error "libmpdec version >= 2.4.1 required" +#if !defined(MPD_VERSION_HEX) || MPD_VERSION_HEX < 0x02050000 + #error "libmpdec version >= 2.5.0 required" #endif From webhook-mailer at python.org Sun Jun 28 21:47:07 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 29 Jun 2020 01:47:07 -0000 Subject: [Python-checkins] bpo-41123: Remove PyUnicode_GetMax() (GH-21192) Message-ID: https://github.com/python/cpython/commit/d9f2a13106254c53550583adca70aeb3979f2993 commit: d9f2a13106254c53550583adca70aeb3979f2993 branch: master author: Inada Naoki committer: GitHub date: 2020-06-29T10:46:51+09:00 summary: bpo-41123: Remove PyUnicode_GetMax() (GH-21192) files: A Misc/NEWS.d/next/C API/2020-06-28-11-39-22.bpo-41123.sjJWjQ.rst M Doc/whatsnew/3.10.rst M Include/cpython/unicodeobject.h M Objects/unicodeobject.c diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 51e42ec6aba91..0dd33131f0430 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -223,3 +223,6 @@ Removed * ``Py_UNICODE_strncmp``: use :c:func:`PyUnicode_Tailmatch` * ``Py_UNICODE_strchr``, ``Py_UNICODE_strrchr``: use :c:func:`PyUnicode_FindChar` + +* Removed ``PyUnicode_GetMax()``. Please migrate to new (:pep:`393`) APIs. + (Contributed by Inada Naoki in :issue:`41103`.) diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index bcf99849f9f66..c1a8564349b99 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -593,9 +593,6 @@ Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE *) PyUnicode_AsUnicodeAndSize( Py_ssize_t *size /* location where to save the length */ ); -/* Get the maximum ordinal for a Unicode character. */ -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE) PyUnicode_GetMax(void); - /* --- _PyUnicodeWriter API ----------------------------------------------- */ diff --git a/Misc/NEWS.d/next/C API/2020-06-28-11-39-22.bpo-41123.sjJWjQ.rst b/Misc/NEWS.d/next/C API/2020-06-28-11-39-22.bpo-41123.sjJWjQ.rst new file mode 100644 index 0000000000000..97331458c6ab9 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-28-11-39-22.bpo-41123.sjJWjQ.rst @@ -0,0 +1 @@ +Removed ``PyUnicode_GetMax()``. 
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index dc0f525c3bfdc..6fa6c3f6985cd 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -497,20 +497,6 @@ unicode_check_encoding_errors(const char *encoding, const char *errors) } -/* The max unicode value is always 0x10FFFF while using the PEP-393 API. - This function is kept for backward compatibility with the old API. */ -Py_UNICODE -PyUnicode_GetMax(void) -{ -#ifdef Py_UNICODE_WIDE - return 0x10FFFF; -#else - /* This is actually an illegal character, so it should - not be passed to unichr. */ - return 0xFFFF; -#endif -} - int _PyUnicode_CheckConsistency(PyObject *op, int check_content) { From webhook-mailer at python.org Mon Jun 29 00:00:56 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Mon, 29 Jun 2020 04:00:56 -0000 Subject: [Python-checkins] bpo-41123: Remove PyLong_FromUnicode() (GH-21204) Message-ID: https://github.com/python/cpython/commit/e4f1fe6edb216e04da03ae80b462ca273f00255b commit: e4f1fe6edb216e04da03ae80b462ca273f00255b branch: master author: Inada Naoki committer: GitHub date: 2020-06-29T13:00:43+09:00 summary: bpo-41123: Remove PyLong_FromUnicode() (GH-21204) files: A Misc/NEWS.d/next/C API/2020-06-29-11-33-49.bpo-41123.qFevek.rst M Doc/c-api/long.rst M Doc/data/refcounts.dat M Doc/whatsnew/3.10.rst M Include/longobject.h M Objects/abstract.c M Objects/longobject.c diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index a7bd43df90689..3921a93843e42 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -94,17 +94,6 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. are no digits, :exc:`ValueError` will be raised. -.. c:function:: PyObject* PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) - - Convert a sequence of Unicode digits to a Python integer value. The Unicode - string is first encoded to a byte string using :c:func:`PyUnicode_EncodeDecimal` - and then converted using :c:func:`PyLong_FromString`. - - .. deprecated-removed:: 3.3 4.0 - Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using - :c:func:`PyLong_FromUnicodeObject`. - - .. c:function:: PyObject* PyLong_FromUnicodeObject(PyObject *u, int base) Convert a sequence of Unicode digits in the string *u* to a Python integer diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 1215c96cd5342..4d9aee370c61d 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -1205,11 +1205,6 @@ PyLong_FromString:const char*:str:: PyLong_FromString:char**:pend:: PyLong_FromString:int:base:: -PyLong_FromUnicode:PyObject*::+1: -PyLong_FromUnicode:Py_UNICODE*:u:: -PyLong_FromUnicode:Py_ssize_t:length:: -PyLong_FromUnicode:int:base:: - PyLong_FromUnicodeObject:PyObject*::+1: PyLong_FromUnicodeObject:PyObject*:u:0: PyLong_FromUnicodeObject:int:base:: diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 0dd33131f0430..0c4ff026bd201 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -226,3 +226,6 @@ Removed * Removed ``PyUnicode_GetMax()``. Please migrate to new (:pep:`393`) APIs. (Contributed by Inada Naoki in :issue:`41103`.) + +* Removed ``PyLong_FromUnicode()``. Please migrate to :c:func:`PyLong_FromUnicodeObject`. + (Contributed by Inada Naoki in :issue:`41103`.) 
diff --git a/Include/longobject.h b/Include/longobject.h index dad08c23f8211..06e3e2490401e 100644 --- a/Include/longobject.h +++ b/Include/longobject.h @@ -102,8 +102,6 @@ PyAPI_FUNC(long long) PyLong_AsLongLongAndOverflow(PyObject *, int *); PyAPI_FUNC(PyObject *) PyLong_FromString(const char *, char **, int); #ifndef Py_LIMITED_API -Py_DEPRECATED(3.3) -PyAPI_FUNC(PyObject *) PyLong_FromUnicode(Py_UNICODE*, Py_ssize_t, int); PyAPI_FUNC(PyObject *) PyLong_FromUnicodeObject(PyObject *u, int base); PyAPI_FUNC(PyObject *) _PyLong_FromBytes(const char *, Py_ssize_t, int); #endif diff --git a/Misc/NEWS.d/next/C API/2020-06-29-11-33-49.bpo-41123.qFevek.rst b/Misc/NEWS.d/next/C API/2020-06-29-11-33-49.bpo-41123.qFevek.rst new file mode 100644 index 0000000000000..1f5813594b0ec --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-29-11-33-49.bpo-41123.qFevek.rst @@ -0,0 +1 @@ +Removed ``PyLong_FromUnicode()``. diff --git a/Objects/abstract.c b/Objects/abstract.c index 0d3f4ac6e1747..3494f33ce380c 100644 --- a/Objects/abstract.c +++ b/Objects/abstract.c @@ -1429,7 +1429,7 @@ PyNumber_Long(PyObject *o) return NULL; if (PyUnicode_Check(o)) - /* The below check is done in PyLong_FromUnicode(). */ + /* The below check is done in PyLong_FromUnicodeObject(). */ return PyLong_FromUnicodeObject(o, 10); if (PyBytes_Check(o)) diff --git a/Objects/longobject.c b/Objects/longobject.c index d00a7a048ddce..571f53a3c00eb 100644 --- a/Objects/longobject.c +++ b/Objects/longobject.c @@ -2503,17 +2503,6 @@ _PyLong_FromBytes(const char *s, Py_ssize_t len, int base) return NULL; } -PyObject * -PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base) -{ - PyObject *v, *unicode = PyUnicode_FromWideChar(u, length); - if (unicode == NULL) - return NULL; - v = PyLong_FromUnicodeObject(unicode, base); - Py_DECREF(unicode); - return v; -} - PyObject * PyLong_FromUnicodeObject(PyObject *u, int base) { From webhook-mailer at python.org Mon Jun 29 01:36:12 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 29 Jun 2020 05:36:12 -0000 Subject: [Python-checkins] bpo-41123: Doc: PyLong_FromUnicode will be removed in 3.10 (GH-21205) Message-ID: https://github.com/python/cpython/commit/ea164309dea4e7f92aeda6daa9e9679290c68827 commit: ea164309dea4e7f92aeda6daa9e9679290c68827 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-28T22:36:04-07:00 summary: bpo-41123: Doc: PyLong_FromUnicode will be removed in 3.10 (GH-21205) (cherry picked from commit 02134dae448c7885c9c07adfc6970f878db33372) Co-authored-by: Inada Naoki files: M Doc/c-api/long.rst diff --git a/Doc/c-api/long.rst b/Doc/c-api/long.rst index f41d419bd3775..a8a91e2678413 100644 --- a/Doc/c-api/long.rst +++ b/Doc/c-api/long.rst @@ -102,7 +102,7 @@ distinguished from a number. Use :c:func:`PyErr_Occurred` to disambiguate. string is first encoded to a byte string using :c:func:`PyUnicode_EncodeDecimal` and then converted using :c:func:`PyLong_FromString`. - .. deprecated-removed:: 3.3 4.0 + .. deprecated-removed:: 3.3 3.10 Part of the old-style :c:type:`Py_UNICODE` API; please migrate to using :c:func:`PyLong_FromUnicodeObject`. 
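For extension authors migrating off PyLong_FromUnicode(), the replacement
PyLong_FromUnicodeObject() takes a str object and a base directly; from pure
Python the equivalent conversion is plain int(), which already accepts any
Unicode decimal digits. A rough illustration (not taken from the commits
above):

    print(int("7f", 16))                   # 127
    print(int("\uff11\uff12\uff13"))       # 123 -- full-width digits are accepted
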
From webhook-mailer at python.org Mon Jun 29 03:34:44 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 29 Jun 2020 07:34:44 -0000 Subject: [Python-checkins] Update ssl.rst Message-ID: https://github.com/python/cpython/commit/12bb0b69ec237a4d6d666bb385d87eb61dbb2bf5 commit: 12bb0b69ec237a4d6d666bb385d87eb61dbb2bf5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-29T00:34:35-07:00 summary: Update ssl.rst files: M Doc/library/ssl.rst diff --git a/Doc/library/ssl.rst b/Doc/library/ssl.rst index 4142e41dcb451..ea0f13c4f18b9 100644 --- a/Doc/library/ssl.rst +++ b/Doc/library/ssl.rst @@ -2486,14 +2486,17 @@ provided. - :meth:`~SSLSocket.read` - :meth:`~SSLSocket.write` - :meth:`~SSLSocket.getpeercert` + - :meth:`~SSLSocket.selected_alpn_protocol` - :meth:`~SSLSocket.selected_npn_protocol` - :meth:`~SSLSocket.cipher` - :meth:`~SSLSocket.shared_ciphers` - :meth:`~SSLSocket.compression` - :meth:`~SSLSocket.pending` - :meth:`~SSLSocket.do_handshake` + - :meth:`~SSLSocket.verify_client_post_handshake` - :meth:`~SSLSocket.unwrap` - :meth:`~SSLSocket.get_channel_binding` + - :meth:`~SSLSocket.version` When compared to :class:`SSLSocket`, this object lacks the following features: From webhook-mailer at python.org Mon Jun 29 04:37:08 2020 From: webhook-mailer at python.org (Srinivas Reddy Thatiparthy =?utf-8?q??= =?utf-8?b?KOCwtuCxjeCwsOCxgOCwqOCwv+CwteCwvuCwuOCxjSAg4LCw4LGG4LCh?= =?utf-8?b?4LGN4LCh4LC/IOCwpOCwvuCwn+Cwv+CwquCwsOCxjeCwpOCwvyk=?=) Date: Mon, 29 Jun 2020 08:37:08 -0000 Subject: [Python-checkins] bpo-41048: mimetypes should read the rule file using UTF-8, not the locale encoding (GH-20998) Message-ID: https://github.com/python/cpython/commit/7f569c9bc0079906012b3034d30fe8abc742e7fc commit: 7f569c9bc0079906012b3034d30fe8abc742e7fc branch: master author: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) committer: GitHub date: 2020-06-29T11:36:48+03:00 summary: bpo-41048: mimetypes should read the rule file using UTF-8, not the locale encoding (GH-20998) files: A Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst M Lib/mimetypes.py M Lib/test/test_mimetypes.py M Misc/ACKS diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 61bfff1635911..f3343c805452d 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -372,7 +372,7 @@ def init(files=None): def read_mime_types(file): try: - f = open(file) + f = open(file, encoding='utf-8') except OSError: return None with f: diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 9cac6ce0225e1..683d393fdb491 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -67,6 +67,18 @@ def test_read_mime_types(self): mime_dict = mimetypes.read_mime_types(file) eq(mime_dict[".pyunit"], "x-application/x-unittest") + # bpo-41048: read_mime_types should read the rule file with 'utf-8' encoding. + # Not with locale encoding. _bootlocale has been imported because io.open(...) + # uses it. 
+ with support.temp_dir() as directory: + data = "application/no-mans-land Fran\u00E7ais" + file = pathlib.Path(directory, "sample.mimetype") + file.write_text(data, encoding='utf-8') + import _bootlocale + with support.swap_attr(_bootlocale, 'getpreferredencoding', lambda do_setlocale=True: 'ASCII'): + mime_dict = mimetypes.read_mime_types(file) + eq(mime_dict[".Fran?ais"], "application/no-mans-land") + def test_non_standard_types(self): eq = self.assertEqual # First try strict diff --git a/Misc/ACKS b/Misc/ACKS index 87f0dede365c2..641ef0cace00e 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1706,6 +1706,7 @@ Mikhail Terekhov Victor Terr?n Pablo Galindo Richard M. Tew +Srinivas Reddy Thatiparthy Tobias Thelen Christian Theune F?vry Thibault diff --git a/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst b/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst new file mode 100644 index 0000000000000..2595900137d69 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst @@ -0,0 +1,2 @@ +:func:`mimetypes.read_mime_types` function reads the rule file using UTF-8 encoding, not the locale encoding. +Patch by Srinivas Reddy Thatiparthy. \ No newline at end of file From webhook-mailer at python.org Mon Jun 29 08:03:09 2020 From: webhook-mailer at python.org (Stefan Krah) Date: Mon, 29 Jun 2020 12:03:09 -0000 Subject: [Python-checkins] Update libmpdec license dates (GH-21216) Message-ID: https://github.com/python/cpython/commit/a3ad95dd2176b529fb6a9f763955b5cc1b499dbb commit: a3ad95dd2176b529fb6a9f763955b5cc1b499dbb branch: master author: Stefan Krah committer: GitHub date: 2020-06-29T14:03:01+02:00 summary: Update libmpdec license dates (GH-21216) files: M Doc/license.rst diff --git a/Doc/license.rst b/Doc/license.rst index 472a5cf3d88b3..fa6d71a78042d 100644 --- a/Doc/license.rst +++ b/Doc/license.rst @@ -889,7 +889,7 @@ libmpdec The :mod:`_decimal` module is built using an included copy of the libmpdec library unless the build is configured ``--with-system-libmpdec``:: - Copyright (c) 2008-2016 Stefan Krah. All rights reserved. + Copyright (c) 2008-2020 Stefan Krah. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions From webhook-mailer at python.org Mon Jun 29 08:07:46 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 29 Jun 2020 12:07:46 -0000 Subject: [Python-checkins] bpo-41048: mimetypes should read the rule file using UTF-8, not the locale encoding (GH-20998) Message-ID: https://github.com/python/cpython/commit/7731139b7af655b9f5df6d1b5493f8dfdf41d569 commit: 7731139b7af655b9f5df6d1b5493f8dfdf41d569 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-29T05:07:41-07:00 summary: bpo-41048: mimetypes should read the rule file using UTF-8, not the locale encoding (GH-20998) (cherry picked from commit 7f569c9bc0079906012b3034d30fe8abc742e7fc) Co-authored-by: Srinivas Reddy Thatiparthy (?????????? ?????? ?????????) 
files: A Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst M Lib/mimetypes.py M Lib/test/test_mimetypes.py M Misc/ACKS diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py index 434f5b37c5821..954bb0a745347 100644 --- a/Lib/mimetypes.py +++ b/Lib/mimetypes.py @@ -372,7 +372,7 @@ def init(files=None): def read_mime_types(file): try: - f = open(file) + f = open(file, encoding='utf-8') except OSError: return None with f: diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py index 9cac6ce0225e1..683d393fdb491 100644 --- a/Lib/test/test_mimetypes.py +++ b/Lib/test/test_mimetypes.py @@ -67,6 +67,18 @@ def test_read_mime_types(self): mime_dict = mimetypes.read_mime_types(file) eq(mime_dict[".pyunit"], "x-application/x-unittest") + # bpo-41048: read_mime_types should read the rule file with 'utf-8' encoding. + # Not with locale encoding. _bootlocale has been imported because io.open(...) + # uses it. + with support.temp_dir() as directory: + data = "application/no-mans-land Fran\u00E7ais" + file = pathlib.Path(directory, "sample.mimetype") + file.write_text(data, encoding='utf-8') + import _bootlocale + with support.swap_attr(_bootlocale, 'getpreferredencoding', lambda do_setlocale=True: 'ASCII'): + mime_dict = mimetypes.read_mime_types(file) + eq(mime_dict[".Fran?ais"], "application/no-mans-land") + def test_non_standard_types(self): eq = self.assertEqual # First try strict diff --git a/Misc/ACKS b/Misc/ACKS index 8098637a32c5d..a08e917b30765 100644 --- a/Misc/ACKS +++ b/Misc/ACKS @@ -1669,6 +1669,7 @@ Mikhail Terekhov Victor Terr?n Pablo Galindo Richard M. Tew +Srinivas Reddy Thatiparthy Tobias Thelen Christian Theune F?vry Thibault diff --git a/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst b/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst new file mode 100644 index 0000000000000..2595900137d69 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst @@ -0,0 +1,2 @@ +:func:`mimetypes.read_mime_types` function reads the rule file using UTF-8 encoding, not the locale encoding. +Patch by Srinivas Reddy Thatiparthy. \ No newline at end of file From webhook-mailer at python.org Mon Jun 29 13:39:37 2020 From: webhook-mailer at python.org (Ravi Teja P) Date: Mon, 29 Jun 2020 17:39:37 -0000 Subject: [Python-checkins] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) Message-ID: https://github.com/python/cpython/commit/b30ee26e366bf509b7538d79bfec6c6d38d53f28 commit: b30ee26e366bf509b7538d79bfec6c6d38d53f28 branch: master author: Ravi Teja P committer: GitHub date: 2020-06-29T13:39:29-04:00 summary: bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) The __hash__() methods of classes IPv4Interface and IPv6Interface had issue of generating constant hash values of 32 and 128 respectively causing hash collisions. 
The fix uses the hash() function to generate hash values for the objects instead of XOR operation files: A Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 6e5a754c2acf1..75b4c352c1d25 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1420,7 +1420,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ @@ -2120,7 +2120,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 6d5814c9774a0..3c070080a6aae 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2548,6 +2548,18 @@ def testsixtofour(self): sixtofouraddr.sixtofour) self.assertFalse(bad_addr.sixtofour) + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV4HashIsNotConstant(self): + ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4") + ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5") + self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__()) + + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV6HashIsNotConstant(self): + ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1") + ipv6_address2 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2") + self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__()) + if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst new file mode 100644 index 0000000000000..1380b31fbe9f4 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst @@ -0,0 +1 @@ +The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and 128 respectively. This resulted in always causing hash collisions. The fix uses hash() to generate hash values for the tuple of (address, mask length, network address). From webhook-mailer at python.org Mon Jun 29 14:13:00 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Mon, 29 Jun 2020 18:13:00 -0000 Subject: [Python-checkins] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) Message-ID: https://github.com/python/cpython/commit/dc8ce8ead182de46584cc1ed8a8c51d48240cbd5 commit: dc8ce8ead182de46584cc1ed8a8c51d48240cbd5 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-29T11:12:50-07:00 summary: bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) The __hash__() methods of classes IPv4Interface and IPv6Interface had issue of generating constant hash values of 32 and 128 respectively causing hash collisions. 
The fix uses the hash() function to generate hash values for the objects instead of XOR operation (cherry picked from commit b30ee26e366bf509b7538d79bfec6c6d38d53f28) Co-authored-by: Ravi Teja P files: A Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 873c7644081af..a3a04f7f4b309 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1370,7 +1370,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ @@ -2017,7 +2017,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index de77111705b69..2eba740e5e7a4 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2053,6 +2053,18 @@ def testsixtofour(self): sixtofouraddr.sixtofour) self.assertFalse(bad_addr.sixtofour) + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV4HashIsNotConstant(self): + ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4") + ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5") + self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__()) + + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV6HashIsNotConstant(self): + ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1") + ipv6_address2 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2") + self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__()) + if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst new file mode 100644 index 0000000000000..1380b31fbe9f4 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst @@ -0,0 +1 @@ +The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and 128 respectively. This resulted in always causing hash collisions. The fix uses hash() to generate hash values for the tuple of (address, mask length, network address). From webhook-mailer at python.org Mon Jun 29 14:57:04 2020 From: webhook-mailer at python.org (Jeong Ukjae) Date: Mon, 29 Jun 2020 18:57:04 -0000 Subject: [Python-checkins] Fix typo in Object/listobject.c (GH-21079) Message-ID: https://github.com/python/cpython/commit/5b96370030707b68e8a5b787e933654297ddbc98 commit: 5b96370030707b68e8a5b787e933654297ddbc98 branch: master author: Jeong Ukjae committer: GitHub date: 2020-06-29T21:56:56+03:00 summary: Fix typo in Object/listobject.c (GH-21079) files: M Misc/NEWS.d/3.9.0a5.rst M Objects/listobject.c diff --git a/Misc/NEWS.d/3.9.0a5.rst b/Misc/NEWS.d/3.9.0a5.rst index 01cbd4423426e..39e017768c3ad 100644 --- a/Misc/NEWS.d/3.9.0a5.rst +++ b/Misc/NEWS.d/3.9.0a5.rst @@ -176,7 +176,7 @@ convention. Patch by Dong-hee Na. .. section: Core and Builtins Chaged list overallocation strategy. It no longer overallocates if the new -size is closer to overalocated size than to the old size and adds padding. 
+size is closer to overallocated size than to the old size and adds padding. .. diff --git a/Objects/listobject.c b/Objects/listobject.c index ab036154eacc2..aac87ea1b61c9 100644 --- a/Objects/listobject.c +++ b/Objects/listobject.c @@ -69,7 +69,7 @@ list_resize(PyListObject *self, Py_ssize_t newsize) * is PY_SSIZE_T_MAX * (9 / 8) + 6 which always fits in a size_t. */ new_allocated = ((size_t)newsize + (newsize >> 3) + 6) & ~(size_t)3; - /* Do not overallocate if the new size is closer to overalocated size + /* Do not overallocate if the new size is closer to overallocated size * than to the old size. */ if (newsize - Py_SIZE(self) > (Py_ssize_t)(new_allocated - newsize)) From webhook-mailer at python.org Mon Jun 29 15:36:49 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Mon, 29 Jun 2020 19:36:49 -0000 Subject: [Python-checkins] bpo-37999: Simplify the conversion code for %c, %d, %x, etc. (GH-20437) Message-ID: https://github.com/python/cpython/commit/e67f7db3c34f70536f36c56bb82e33c3512a53a3 commit: e67f7db3c34f70536f36c56bb82e33c3512a53a3 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-29T22:36:41+03:00 summary: bpo-37999: Simplify the conversion code for %c, %d, %x, etc. (GH-20437) Since PyLong_AsLong() no longer use __int__, explicit call of PyNumber_Index() before it is no longer needed. files: M Objects/bytearrayobject.c M Objects/bytesobject.c M Objects/unicodeobject.c diff --git a/Objects/bytearrayobject.c b/Objects/bytearrayobject.c index 5a803be6277c8..83c79b200a0a1 100644 --- a/Objects/bytearrayobject.c +++ b/Objects/bytearrayobject.c @@ -22,22 +22,15 @@ char _PyByteArray_empty_string[] = ""; static int _getbytevalue(PyObject* arg, int *value) { - long face_value; + int overflow; + long face_value = PyLong_AsLongAndOverflow(arg, &overflow); - if (PyLong_Check(arg)) { - face_value = PyLong_AsLong(arg); - } else { - PyObject *index = PyNumber_Index(arg); - if (index == NULL) { - *value = -1; - return 0; - } - face_value = PyLong_AsLong(index); - Py_DECREF(index); + if (face_value == -1 && PyErr_Occurred()) { + *value = -1; + return 0; } - if (face_value < 0 || face_value >= 256) { - /* this includes the OverflowError in case the long is too large */ + /* this includes an overflow in converting to C long */ PyErr_SetString(PyExc_ValueError, "byte must be in range(0, 256)"); *value = -1; return 0; diff --git a/Objects/bytesobject.c b/Objects/bytesobject.c index 782bc8e1fa0b7..7632cb5e4ddd9 100644 --- a/Objects/bytesobject.c +++ b/Objects/bytesobject.c @@ -510,17 +510,14 @@ formatlong(PyObject *v, int flags, int prec, int type) iobj = _PyNumber_Index(v); else iobj = PyNumber_Long(v); - if (iobj == NULL) { - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return NULL; - } - else if (!PyLong_Check(iobj)) - Py_CLEAR(iobj); if (iobj != NULL) { + assert(PyLong_Check(iobj)); result = _PyUnicode_FormatLong(iobj, flags & F_ALT, prec, type); Py_DECREF(iobj); return result; } + if (!PyErr_ExceptionMatches(PyExc_TypeError)) + return NULL; } PyErr_Format(PyExc_TypeError, "%%%c format: %s is required, not %.200s", type, @@ -542,26 +539,16 @@ byte_converter(PyObject *arg, char *p) return 1; } else { - PyObject *iobj; - long ival; int overflow; - /* make sure number is a type of integer */ - if (PyLong_Check(arg)) { - ival = PyLong_AsLongAndOverflow(arg, &overflow); - } - else { - iobj = PyNumber_Index(arg); - if (iobj == NULL) { - if (!PyErr_ExceptionMatches(PyExc_TypeError)) - return 0; + long ival = PyLong_AsLongAndOverflow(arg, &overflow); + if (ival == 
-1 && PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_TypeError)) { goto onError; } - ival = PyLong_AsLongAndOverflow(iobj, &overflow); - Py_DECREF(iobj); + return 0; } - if (!overflow && ival == -1 && PyErr_Occurred()) - goto onError; - if (overflow || !(0 <= ival && ival <= 255)) { + if (!(0 <= ival && ival <= 255)) { + /* this includes an overflow in converting to C long */ PyErr_SetString(PyExc_OverflowError, "%c arg not in range(256)"); return 0; diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 6fa6c3f6985cd..c46ba4ae57dc6 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -14641,19 +14641,14 @@ mainformatlong(PyObject *v, if (!PyLong_Check(v)) { if (type == 'o' || type == 'x' || type == 'X') { iobj = _PyNumber_Index(v); - if (iobj == NULL) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) - goto wrongtype; - return -1; - } } else { iobj = PyNumber_Long(v); - if (iobj == NULL ) { - if (PyErr_ExceptionMatches(PyExc_TypeError)) - goto wrongtype; - return -1; - } + } + if (iobj == NULL ) { + if (PyErr_ExceptionMatches(PyExc_TypeError)) + goto wrongtype; + return -1; } assert(PyLong_Check(iobj)); } @@ -14736,24 +14731,17 @@ formatchar(PyObject *v) goto onError; } else { - PyObject *iobj; - long x; - /* make sure number is a type of integer */ - if (!PyLong_Check(v)) { - iobj = PyNumber_Index(v); - if (iobj == NULL) { + int overflow; + long x = PyLong_AsLongAndOverflow(v, &overflow); + if (x == -1 && PyErr_Occurred()) { + if (PyErr_ExceptionMatches(PyExc_TypeError)) { goto onError; } - x = PyLong_AsLong(iobj); - Py_DECREF(iobj); - } - else { - x = PyLong_AsLong(v); + return (Py_UCS4) -1; } - if (x == -1 && PyErr_Occurred()) - goto onError; if (x < 0 || x > MAX_UNICODE) { + /* this includes an overflow in converting to C long */ PyErr_SetString(PyExc_OverflowError, "%c arg not in range(0x110000)"); return (Py_UCS4) -1; From webhook-mailer at python.org Mon Jun 29 16:59:30 2020 From: webhook-mailer at python.org (Jason R. Coombs) Date: Mon, 29 Jun 2020 20:59:30 -0000 Subject: [Python-checkins] bpo-40924: Ensure importlib.resources.path returns an extant path (GH-20857) Message-ID: https://github.com/python/cpython/commit/2fb5f038f2a2e91a7293d62dfd5601e6eb500c55 commit: 2fb5f038f2a2e91a7293d62dfd5601e6eb500c55 branch: master author: Jason R. Coombs committer: GitHub date: 2020-06-29T22:59:22+02:00 summary: bpo-40924: Ensure importlib.resources.path returns an extant path (GH-20857) files: A Misc/NEWS.d/next/Library/2020-06-13-12-04-50.bpo-40924.SM_luS.rst M Lib/importlib/readers.py M Lib/test/test_importlib/test_path.py diff --git a/Lib/importlib/readers.py b/Lib/importlib/readers.py index fb49ebe2b1642..6331e4daf4313 100644 --- a/Lib/importlib/readers.py +++ b/Lib/importlib/readers.py @@ -7,11 +7,19 @@ class FileReader(abc.TraversableResources): def __init__(self, loader): self.path = pathlib.Path(loader.path).parent + def resource_path(self, resource): + """ + Return the file system path to prevent + `resources.path()` from creating a temporary + copy. + """ + return str(self.path.joinpath(resource)) + def files(self): return self.path -class ZipReader(FileReader): +class ZipReader(abc.TraversableResources): def __init__(self, loader, module): _, _, name = module.rpartition('.') prefix = loader.prefix.replace('\\', '/') + name + '/' @@ -28,3 +36,6 @@ def is_resource(self, path): # for non-existent paths. 
target = self.files().joinpath(path) return target.is_file() and target.exists() + + def files(self): + return self.path diff --git a/Lib/test/test_importlib/test_path.py b/Lib/test/test_importlib/test_path.py index c4e7285411322..abf8086558158 100644 --- a/Lib/test/test_importlib/test_path.py +++ b/Lib/test/test_importlib/test_path.py @@ -27,6 +27,15 @@ def test_reading(self): class PathDiskTests(PathTests, unittest.TestCase): data = data01 + def test_natural_path(self): + """ + Guarantee the internal implementation detail that + file-system-backed resources do not get the tempdir + treatment. + """ + with resources.path(self.data, 'utf-8.file') as path: + assert 'data' in str(path) + class PathZipTests(PathTests, util.ZipSetup, unittest.TestCase): def test_remove_in_context_manager(self): diff --git a/Misc/NEWS.d/next/Library/2020-06-13-12-04-50.bpo-40924.SM_luS.rst b/Misc/NEWS.d/next/Library/2020-06-13-12-04-50.bpo-40924.SM_luS.rst new file mode 100644 index 0000000000000..4e4c6e88ac572 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-13-12-04-50.bpo-40924.SM_luS.rst @@ -0,0 +1,3 @@ +Ensure ``importlib.resources.path`` returns an extant path for the +SourceFileLoader's resource reader. Avoids the regression identified in +master while a long-term solution is devised. From webhook-mailer at python.org Mon Jun 29 18:49:12 2020 From: webhook-mailer at python.org (Victor Stinner) Date: Mon, 29 Jun 2020 22:49:12 -0000 Subject: [Python-checkins] bpo-23427: Add sys.orig_argv attribute (GH-20729) Message-ID: https://github.com/python/cpython/commit/dd8a93e23b5c4f9290e1cea6183d97eb9b5e61c0 commit: dd8a93e23b5c4f9290e1cea6183d97eb9b5e61c0 branch: master author: Victor Stinner committer: GitHub date: 2020-06-30T00:49:03+02:00 summary: bpo-23427: Add sys.orig_argv attribute (GH-20729) Add sys.orig_argv attribute: the list of the original command line arguments passed to the Python executable. Rename also PyConfig._orig_argv to PyConfig.orig_argv and document it. files: A Misc/NEWS.d/next/Library/2020-06-08-18-59-16.bpo-23427.ilg1Cz.rst M Doc/c-api/init_config.rst M Doc/library/sys.rst M Doc/whatsnew/3.10.rst M Include/cpython/initconfig.h M Lib/test/test_embed.py M Lib/test/test_sys.py M Python/initconfig.c M Python/sysmodule.c diff --git a/Doc/c-api/init_config.rst b/Doc/c-api/init_config.rst index 9b0728d962152..84064d93ea3b1 100644 --- a/Doc/c-api/init_config.rst +++ b/Doc/c-api/init_config.rst @@ -424,6 +424,8 @@ PyConfig :c:member:`~PyConfig.argv` is empty, an empty string is added to ensure that :data:`sys.argv` always exists and is never empty. + See also the :c:member:`~PyConfig.orig_argv` member. + .. c:member:: wchar_t* base_exec_prefix :data:`sys.base_exec_prefix`. @@ -586,6 +588,23 @@ PyConfig * 1: Remove assertions, set ``__debug__`` to ``False`` * 2: Strip docstrings + .. c:member:: PyWideStringList orig_argv + + The list of the original command line arguments passed to the Python + executable. + + If :c:member:`~PyConfig.orig_argv` list is empty and + :c:member:`~PyConfig.argv` is not a list only containing an empty + string, :c:func:`PyConfig_Read()` copies :c:member:`~PyConfig.argv` into + :c:member:`~PyConfig.orig_argv` before modifying + :c:member:`~PyConfig.argv` (if :c:member:`~PyConfig.parse_argv` is + non-zero). + + See also the :c:member:`~PyConfig.argv` member and the + :c:func:`Py_GetArgcArgv` function. + + .. versionadded:: 3.10 + .. 
c:member:: int parse_argv If non-zero, parse :c:member:`~PyConfig.argv` the same way the regular @@ -982,6 +1001,8 @@ Py_GetArgcArgv() Get the original command line arguments, before Python modified them. + See also :c:member:`PyConfig.orig_argv` member. + Multi-Phase Initialization Private Provisional API -------------------------------------------------- diff --git a/Doc/library/sys.rst b/Doc/library/sys.rst index 880f252f84aa0..d201d7061f980 100644 --- a/Doc/library/sys.rst +++ b/Doc/library/sys.rst @@ -66,6 +66,8 @@ always available. To loop over the standard input, or the list of files given on the command line, see the :mod:`fileinput` module. + See also :data:`sys.orig_argv`. + .. note:: On Unix, command line arguments are passed by bytes from OS. Python decodes them with filesystem encoding and "surrogateescape" error handler. @@ -1037,6 +1039,16 @@ always available. deleting essential items from the dictionary may cause Python to fail. +.. data:: orig_argv + + The list of the original command line arguments passed to the Python + executable. + + See also :data:`sys.argv`. + + .. versionadded:: 3.10 + + .. data:: path .. index:: triple: module; search; path diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 0c4ff026bd201..a755d2672ae6d 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -110,6 +110,13 @@ Added the *root_dir* and *dir_fd* parameters in :func:`~glob.glob` and :func:`~glob.iglob` which allow to specify the root directory for searching. (Contributed by Serhiy Storchaka in :issue:`38144`.) +sys +--- + +Add :data:`sys.orig_argv` attribute: the list of the original command line +arguments passed to the Python executable. +(Contributed by Victor Stinner in :issue:`23427`.) + Optimizations ============= @@ -150,10 +157,14 @@ C API Changes New Features ------------ - The result of :c:func:`PyNumber_Index` now always has exact type :class:`int`. +* The result of :c:func:`PyNumber_Index` now always has exact type :class:`int`. Previously, the result could have been an instance of a subclass of ``int``. (Contributed by Serhiy Storchaka in :issue:`40792`.) +* Add a new :c:member:`~PyConfig.orig_argv` member to the :c:type:`PyConfig` + structure: the list of the original command line arguments passed to the + Python executable. + (Contributed by Victor Stinner in :issue:`23427`.) Porting to Python 3.10 ---------------------- diff --git a/Include/cpython/initconfig.h b/Include/cpython/initconfig.h index 5b05eab63bb46..bbe8387677715 100644 --- a/Include/cpython/initconfig.h +++ b/Include/cpython/initconfig.h @@ -408,13 +408,15 @@ typedef struct { Default: 0. */ int _isolated_interpreter; - /* Original command line arguments. If _orig_argv is empty and _argv is - not equal to [''], PyConfig_Read() copies the configuration 'argv' list - into '_orig_argv' list before modifying 'argv' list (if parse_argv - is non-zero). + /* The list of the original command line arguments passed to the Python + executable. + + If 'orig_argv' list is empty and 'argv' is not a list only containing an + empty string, PyConfig_Read() copies 'argv' into 'orig_argv' before + modifying 'argv' (if 'parse_argv is non-zero). _PyConfig_Write() initializes Py_GetArgcArgv() to this list. */ - PyWideStringList _orig_argv; + PyWideStringList orig_argv; } PyConfig; PyAPI_FUNC(void) PyConfig_InitPythonConfig(PyConfig *config); @@ -445,7 +447,7 @@ PyAPI_FUNC(PyStatus) PyConfig_SetWideStringList(PyConfig *config, /* Get the original command line arguments, before Python modified them. 
- See also PyConfig._orig_argv. */ + See also PyConfig.orig_argv. */ PyAPI_FUNC(void) Py_GetArgcArgv(int *argc, wchar_t ***argv); #endif /* !Py_LIMITED_API */ diff --git a/Lib/test/test_embed.py b/Lib/test/test_embed.py index fe47289777a42..174892a22b48b 100644 --- a/Lib/test/test_embed.py +++ b/Lib/test/test_embed.py @@ -365,7 +365,7 @@ class InitConfigTests(EmbeddingTestsMixin, unittest.TestCase): 'program_name': GET_DEFAULT_CONFIG, 'parse_argv': 0, 'argv': [""], - '_orig_argv': [], + 'orig_argv': [], 'xoptions': [], 'warnoptions': [], @@ -739,11 +739,11 @@ def test_init_from_config(self): 'pycache_prefix': 'conf_pycache_prefix', 'program_name': './conf_program_name', 'argv': ['-c', 'arg2'], - '_orig_argv': ['python3', - '-W', 'cmdline_warnoption', - '-X', 'cmdline_xoption', - '-c', 'pass', - 'arg2'], + 'orig_argv': ['python3', + '-W', 'cmdline_warnoption', + '-X', 'cmdline_xoption', + '-c', 'pass', + 'arg2'], 'parse_argv': 1, 'xoptions': [ 'config_xoption1=3', @@ -874,7 +874,7 @@ def test_preinit_parse_argv(self): } config = { 'argv': ['script.py'], - '_orig_argv': ['python3', '-X', 'dev', 'script.py'], + 'orig_argv': ['python3', '-X', 'dev', 'script.py'], 'run_filename': os.path.abspath('script.py'), 'dev_mode': 1, 'faulthandler': 1, @@ -896,7 +896,7 @@ def test_preinit_dont_parse_argv(self): "script.py"] config = { 'argv': argv, - '_orig_argv': argv, + 'orig_argv': argv, 'isolated': 0, } self.check_all_configs("test_preinit_dont_parse_argv", config, preconfig, @@ -975,9 +975,9 @@ def test_init_sys_add(self): 'ignore:::sysadd_warnoption', 'ignore:::config_warnoption', ], - '_orig_argv': ['python3', - '-W', 'ignore:::cmdline_warnoption', - '-X', 'cmdline_xoption'], + 'orig_argv': ['python3', + '-W', 'ignore:::cmdline_warnoption', + '-X', 'cmdline_xoption'], } self.check_all_configs("test_init_sys_add", config, api=API_PYTHON) @@ -986,7 +986,7 @@ def test_init_run_main(self): 'print(json.dumps(_testinternalcapi.get_configs()))') config = { 'argv': ['-c', 'arg2'], - '_orig_argv': ['python3', '-c', code, 'arg2'], + 'orig_argv': ['python3', '-c', code, 'arg2'], 'program_name': './python3', 'run_command': code + '\n', 'parse_argv': 1, @@ -998,9 +998,9 @@ def test_init_main(self): 'print(json.dumps(_testinternalcapi.get_configs()))') config = { 'argv': ['-c', 'arg2'], - '_orig_argv': ['python3', - '-c', code, - 'arg2'], + 'orig_argv': ['python3', + '-c', code, + 'arg2'], 'program_name': './python3', 'run_command': code + '\n', 'parse_argv': 1, @@ -1014,7 +1014,7 @@ def test_init_parse_argv(self): config = { 'parse_argv': 1, 'argv': ['-c', 'arg1', '-v', 'arg3'], - '_orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], + 'orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], 'program_name': './argv0', 'run_command': 'pass\n', 'use_environment': 0, @@ -1028,7 +1028,7 @@ def test_init_dont_parse_argv(self): config = { 'parse_argv': 0, 'argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], - '_orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], + 'orig_argv': ['./argv0', '-E', '-c', 'pass', 'arg1', '-v', 'arg3'], 'program_name': './argv0', } self.check_all_configs("test_init_dont_parse_argv", config, pre_config, @@ -1316,9 +1316,9 @@ def test_init_warnoptions(self): 'faulthandler': 1, 'bytes_warning': 1, 'warnoptions': warnoptions, - '_orig_argv': ['python3', - '-Wignore:::cmdline1', - '-Wignore:::cmdline2'], + 'orig_argv': ['python3', + '-Wignore:::cmdline1', + '-Wignore:::cmdline2'], } self.check_all_configs("test_init_warnoptions", config, 
preconfig, api=API_PYTHON) diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py index 194128e5c6bf2..aaba6630ff439 100644 --- a/Lib/test/test_sys.py +++ b/Lib/test/test_sys.py @@ -434,6 +434,11 @@ def g456(): def test_attributes(self): self.assertIsInstance(sys.api_version, int) self.assertIsInstance(sys.argv, list) + for arg in sys.argv: + self.assertIsInstance(arg, str) + self.assertIsInstance(sys.orig_argv, list) + for arg in sys.orig_argv: + self.assertIsInstance(arg, str) self.assertIn(sys.byteorder, ("little", "big")) self.assertIsInstance(sys.builtin_module_names, tuple) self.assertIsInstance(sys.copyright, str) @@ -930,6 +935,21 @@ def test__enablelegacywindowsfsencoding(self): out = out.decode('ascii', 'replace').rstrip() self.assertEqual(out, 'mbcs replace') + def test_orig_argv(self): + code = textwrap.dedent(''' + import sys + print(sys.argv) + print(sys.orig_argv) + ''') + args = [sys.executable, '-I', '-X', 'utf8', '-c', code, 'arg'] + proc = subprocess.run(args, check=True, capture_output=True, text=True) + expected = [ + repr(['-c', 'arg']), # sys.argv + repr(args), # sys.orig_argv + ] + self.assertEqual(proc.stdout.rstrip().splitlines(), expected, + proc) + @test.support.cpython_only class UnraisableHookTest(unittest.TestCase): diff --git a/Misc/NEWS.d/next/Library/2020-06-08-18-59-16.bpo-23427.ilg1Cz.rst b/Misc/NEWS.d/next/Library/2020-06-08-18-59-16.bpo-23427.ilg1Cz.rst new file mode 100644 index 0000000000000..37382975bb4fc --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-08-18-59-16.bpo-23427.ilg1Cz.rst @@ -0,0 +1,2 @@ +Add :data:`sys.orig_argv` attribute: the list of the original command line +arguments passed to the Python executable. diff --git a/Python/initconfig.c b/Python/initconfig.c index 96169454506cb..86285c77e2307 100644 --- a/Python/initconfig.c +++ b/Python/initconfig.c @@ -601,7 +601,7 @@ PyConfig_Clear(PyConfig *config) CLEAR(config->run_filename); CLEAR(config->check_hash_pycs_mode); - _PyWideStringList_Clear(&config->_orig_argv); + _PyWideStringList_Clear(&config->orig_argv); #undef CLEAR } @@ -856,7 +856,7 @@ _PyConfig_Copy(PyConfig *config, const PyConfig *config2) COPY_ATTR(pathconfig_warnings); COPY_ATTR(_init_main); COPY_ATTR(_isolated_interpreter); - COPY_WSTRLIST(_orig_argv); + COPY_WSTRLIST(orig_argv); #undef COPY_ATTR #undef COPY_WSTR_ATTR @@ -957,7 +957,7 @@ config_as_dict(const PyConfig *config) SET_ITEM_INT(pathconfig_warnings); SET_ITEM_INT(_init_main); SET_ITEM_INT(_isolated_interpreter); - SET_ITEM_WSTRLIST(_orig_argv); + SET_ITEM_WSTRLIST(orig_argv); return dict; @@ -1864,8 +1864,8 @@ _PyConfig_Write(const PyConfig *config, _PyRuntimeState *runtime) preconfig->use_environment = config->use_environment; preconfig->dev_mode = config->dev_mode; - if (_Py_SetArgcArgv(config->_orig_argv.length, - config->_orig_argv.items) < 0) + if (_Py_SetArgcArgv(config->orig_argv.length, + config->orig_argv.items) < 0) { return _PyStatus_NO_MEMORY(); } @@ -2501,11 +2501,11 @@ PyConfig_Read(PyConfig *config) config_get_global_vars(config); - if (config->_orig_argv.length == 0 + if (config->orig_argv.length == 0 && !(config->argv.length == 1 && wcscmp(config->argv.items[0], L"") == 0)) { - if (_PyWideStringList_Copy(&config->_orig_argv, &config->argv) < 0) { + if (_PyWideStringList_Copy(&config->orig_argv, &config->argv) < 0) { return _PyStatus_NO_MEMORY(); } } @@ -2589,7 +2589,7 @@ PyConfig_Read(PyConfig *config) assert(config->check_hash_pycs_mode != NULL); assert(config->_install_importlib >= 0); assert(config->pathconfig_warnings >= 0); - 
assert(_PyWideStringList_CheckConsistency(&config->_orig_argv)); + assert(_PyWideStringList_CheckConsistency(&config->orig_argv)); status = _PyStatus_OK(); diff --git a/Python/sysmodule.c b/Python/sysmodule.c index f3b5a6afdf1e5..9fcdb5dbc49b1 100644 --- a/Python/sysmodule.c +++ b/Python/sysmodule.c @@ -2931,6 +2931,7 @@ _PySys_InitMain(PyThreadState *tstate) } COPY_LIST("argv", config->argv); + COPY_LIST("orig_argv", config->orig_argv); COPY_LIST("warnoptions", config->warnoptions); PyObject *xoptions = sys_create_xoptions_dict(config); From webhook-mailer at python.org Mon Jun 29 20:18:40 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 30 Jun 2020 00:18:40 -0000 Subject: [Python-checkins] bpo-41152: IDLE: always use UTF-8 for standard IO streams (GH-21214) Message-ID: https://github.com/python/cpython/commit/2515a28230b1a011205f30263da6b01c6bd167a3 commit: 2515a28230b1a011205f30263da6b01c6bd167a3 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-29T20:18:22-04:00 summary: bpo-41152: IDLE: always use UTF-8 for standard IO streams (GH-21214) files: A Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_outwin.py M Lib/idlelib/iomenu.py M Lib/idlelib/outwin.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index c270fcbae2bd1..7ae29af0b30ce 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,9 @@ Released on 2020-10-05? ====================================== +bpo-41152: The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE +is now always UTF-8. + bpo-41144: Make Open Module open a special module such as os.path. bpo-40723: Make test_idle pass when run after import. diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py index cd099ecd841b3..e347bfca7f191 100644 --- a/Lib/idlelib/idle_test/test_outwin.py +++ b/Lib/idlelib/idle_test/test_outwin.py @@ -58,11 +58,6 @@ def test_write(self): get = self.text.get write = self.window.write - # Test bytes. - b = b'Test bytes.' - eq(write(b), len(b)) - eq(get('1.0', '1.end'), b.decode()) - # No new line - insert stays on same line. delete('1.0', 'end') test_text = 'test text' diff --git a/Lib/idlelib/iomenu.py b/Lib/idlelib/iomenu.py index 4b2833b8ca56f..7f3f656ee2874 100644 --- a/Lib/idlelib/iomenu.py +++ b/Lib/idlelib/iomenu.py @@ -13,52 +13,12 @@ import idlelib from idlelib.config import idleConf -if idlelib.testing: # Set True by test.test_idle to avoid setlocale. - encoding = 'utf-8' - errors = 'surrogateescape' +encoding = 'utf-8' +if sys.platform == 'win32': + errors = 'surrogatepass' else: - # Try setting the locale, so that we can find out - # what encoding to use - try: - import locale - locale.setlocale(locale.LC_CTYPE, "") - except (ImportError, locale.Error): - pass - - if sys.platform == 'win32': - encoding = 'utf-8' - errors = 'surrogateescape' - else: - try: - # Different things can fail here: the locale module may not be - # loaded, it may not offer nl_langinfo, or CODESET, or the - # resulting codeset may be unknown to Python. We ignore all - # these problems, falling back to ASCII - locale_encoding = locale.nl_langinfo(locale.CODESET) - if locale_encoding: - codecs.lookup(locale_encoding) - except (NameError, AttributeError, LookupError): - # Try getdefaultlocale: it parses environment variables, - # which may give a clue. Unfortunately, getdefaultlocale has - # bugs that can cause ValueError. 
- try: - locale_encoding = locale.getdefaultlocale()[1] - if locale_encoding: - codecs.lookup(locale_encoding) - except (ValueError, LookupError): - pass + errors = 'surrogateescape' - if locale_encoding: - encoding = locale_encoding.lower() - errors = 'strict' - else: - # POSIX locale or macOS - encoding = 'ascii' - errors = 'surrogateescape' - # Encoding is used in multiple files; locale_encoding nowhere. - # The only use of 'encoding' below is in _decode as initial value - # of deprecated block asking user for encoding. - # Perhaps use elsewhere should be reviewed. coding_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII) blank_re = re.compile(r'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py index 90272b6feb4af..5ab08bbaf4bc9 100644 --- a/Lib/idlelib/outwin.py +++ b/Lib/idlelib/outwin.py @@ -6,7 +6,6 @@ from tkinter import messagebox from idlelib.editor import EditorWindow -from idlelib import iomenu file_line_pats = [ @@ -110,8 +109,7 @@ def write(self, s, tags=(), mark="insert"): Return: Length of text inserted. """ - if isinstance(s, bytes): - s = s.decode(iomenu.encoding, "replace") + assert isinstance(s, str) self.text.insert(mark, s, tags) self.text.see(mark) self.text.update() diff --git a/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst b/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst new file mode 100644 index 0000000000000..434be10b5309c --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst @@ -0,0 +1,2 @@ +The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE is now always +UTF-8. From webhook-mailer at python.org Mon Jun 29 20:39:06 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 30 Jun 2020 00:39:06 -0000 Subject: [Python-checkins] bpo-41152: IDLE: always use UTF-8 for standard IO streams (GH-21214) Message-ID: https://github.com/python/cpython/commit/00fd04b9b7537c473c3f9396a861868b8ddd3bb2 commit: 00fd04b9b7537c473c3f9396a861868b8ddd3bb2 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-29T17:39:02-07:00 summary: bpo-41152: IDLE: always use UTF-8 for standard IO streams (GH-21214) (cherry picked from commit 2515a28230b1a011205f30263da6b01c6bd167a3) Co-authored-by: Serhiy Storchaka files: A Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst M Lib/idlelib/NEWS.txt M Lib/idlelib/idle_test/test_outwin.py M Lib/idlelib/iomenu.py M Lib/idlelib/outwin.py diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt index 584fd4631fbc2..59b34b1519fdf 100644 --- a/Lib/idlelib/NEWS.txt +++ b/Lib/idlelib/NEWS.txt @@ -3,6 +3,9 @@ Released on 2020-07-03? ====================================== +bpo-41152: The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE +is now always UTF-8. + bpo-41144: Make Open Module open a special module such as os.path. bpo-40723: Make test_idle pass when run after import. diff --git a/Lib/idlelib/idle_test/test_outwin.py b/Lib/idlelib/idle_test/test_outwin.py index cd099ecd841b3..e347bfca7f191 100644 --- a/Lib/idlelib/idle_test/test_outwin.py +++ b/Lib/idlelib/idle_test/test_outwin.py @@ -58,11 +58,6 @@ def test_write(self): get = self.text.get write = self.window.write - # Test bytes. - b = b'Test bytes.' - eq(write(b), len(b)) - eq(get('1.0', '1.end'), b.decode()) - # No new line - insert stays on same line. 
delete('1.0', 'end') test_text = 'test text' diff --git a/Lib/idlelib/iomenu.py b/Lib/idlelib/iomenu.py index 4b2833b8ca56f..7f3f656ee2874 100644 --- a/Lib/idlelib/iomenu.py +++ b/Lib/idlelib/iomenu.py @@ -13,52 +13,12 @@ import idlelib from idlelib.config import idleConf -if idlelib.testing: # Set True by test.test_idle to avoid setlocale. - encoding = 'utf-8' - errors = 'surrogateescape' +encoding = 'utf-8' +if sys.platform == 'win32': + errors = 'surrogatepass' else: - # Try setting the locale, so that we can find out - # what encoding to use - try: - import locale - locale.setlocale(locale.LC_CTYPE, "") - except (ImportError, locale.Error): - pass - - if sys.platform == 'win32': - encoding = 'utf-8' - errors = 'surrogateescape' - else: - try: - # Different things can fail here: the locale module may not be - # loaded, it may not offer nl_langinfo, or CODESET, or the - # resulting codeset may be unknown to Python. We ignore all - # these problems, falling back to ASCII - locale_encoding = locale.nl_langinfo(locale.CODESET) - if locale_encoding: - codecs.lookup(locale_encoding) - except (NameError, AttributeError, LookupError): - # Try getdefaultlocale: it parses environment variables, - # which may give a clue. Unfortunately, getdefaultlocale has - # bugs that can cause ValueError. - try: - locale_encoding = locale.getdefaultlocale()[1] - if locale_encoding: - codecs.lookup(locale_encoding) - except (ValueError, LookupError): - pass + errors = 'surrogateescape' - if locale_encoding: - encoding = locale_encoding.lower() - errors = 'strict' - else: - # POSIX locale or macOS - encoding = 'ascii' - errors = 'surrogateescape' - # Encoding is used in multiple files; locale_encoding nowhere. - # The only use of 'encoding' below is in _decode as initial value - # of deprecated block asking user for encoding. - # Perhaps use elsewhere should be reviewed. coding_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII) blank_re = re.compile(r'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) diff --git a/Lib/idlelib/outwin.py b/Lib/idlelib/outwin.py index 90272b6feb4af..5ab08bbaf4bc9 100644 --- a/Lib/idlelib/outwin.py +++ b/Lib/idlelib/outwin.py @@ -6,7 +6,6 @@ from tkinter import messagebox from idlelib.editor import EditorWindow -from idlelib import iomenu file_line_pats = [ @@ -110,8 +109,7 @@ def write(self, s, tags=(), mark="insert"): Return: Length of text inserted. """ - if isinstance(s, bytes): - s = s.decode(iomenu.encoding, "replace") + assert isinstance(s, str) self.text.insert(mark, s, tags) self.text.see(mark) self.text.update() diff --git a/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst b/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst new file mode 100644 index 0000000000000..434be10b5309c --- /dev/null +++ b/Misc/NEWS.d/next/IDLE/2020-06-29-14-51-15.bpo-41152.d6mV0C.rst @@ -0,0 +1,2 @@ +The encoding of ``stdin``, ``stdout`` and ``stderr`` in IDLE is now always +UTF-8. 
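[Illustrative note] The change above hard-codes the stream policy in idlelib/iomenu.py: encoding is always UTF-8, with 'surrogatepass' as the error handler on Windows and 'surrogateescape' elsewhere. Below is a minimal sketch of that policy applied to an arbitrary binary stream; it is not IDLE's own stream setup, and only the encoding/errors values are taken from the diff.

import io
import sys

# Values from the iomenu.py diff above; everything else is illustrative.
encoding = 'utf-8'
errors = 'surrogatepass' if sys.platform == 'win32' else 'surrogateescape'

buf = io.BytesIO()
stream = io.TextIOWrapper(buf, encoding=encoding, errors=errors,
                          write_through=True)
stream.write('Fran\u00e7ais\n')   # encodes the same way regardless of locale
print(buf.getvalue())             # b'Fran\xc3\xa7ais\n'

With the removed locale-probing code, a POSIX system whose locale could not be determined could have ended up with an ASCII codec here; the hard-coded values make the behaviour uniform across platforms.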
From webhook-mailer at python.org Mon Jun 29 23:23:16 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 30 Jun 2020 03:23:16 -0000 Subject: [Python-checkins] bpo-41123: Remove PyUnicode_AsUnicodeCopy (GH-21209) Message-ID: https://github.com/python/cpython/commit/b3332660adb02babb7e66e45310c66dc9a9a94da commit: b3332660adb02babb7e66e45310c66dc9a9a94da branch: master author: Inada Naoki committer: GitHub date: 2020-06-30T12:23:07+09:00 summary: bpo-41123: Remove PyUnicode_AsUnicodeCopy (GH-21209) files: A Misc/NEWS.d/next/C API/2020-06-29-15-49-36.bpo-41123.wYY4E1.rst M Doc/c-api/unicode.rst M Doc/data/refcounts.dat M Doc/whatsnew/3.10.rst M Include/cpython/unicodeobject.h M Objects/unicodeobject.c diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index b1787ed1ce89c..0748a1e319489 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -724,20 +724,6 @@ Extension modules can continue using them, as they will not be removed in Python .. versionadded:: 3.3 -.. c:function:: Py_UNICODE* PyUnicode_AsUnicodeCopy(PyObject *unicode) - - Create a copy of a Unicode string ending with a null code point. Return ``NULL`` - and raise a :exc:`MemoryError` exception on memory allocation failure, - otherwise return a new allocated buffer (use :c:func:`PyMem_Free` to free - the buffer). Note that the resulting :c:type:`Py_UNICODE*` string may - contain embedded null code points, which would cause the string to be - truncated when used in most C functions. - - .. versionadded:: 3.2 - - Please migrate to using :c:func:`PyUnicode_AsUCS4Copy` or similar new APIs. - - .. c:function:: Py_ssize_t PyUnicode_GetSize(PyObject *unicode) Return the size of the deprecated :c:type:`Py_UNICODE` representation, in diff --git a/Doc/data/refcounts.dat b/Doc/data/refcounts.dat index 4d9aee370c61d..882d7d6d62fc3 100644 --- a/Doc/data/refcounts.dat +++ b/Doc/data/refcounts.dat @@ -2419,9 +2419,6 @@ PyUnicode_AsUnicodeAndSize:Py_UNICODE*::: PyUnicode_AsUnicodeAndSize:PyObject*:unicode:0: PyUnicode_AsUnicodeAndSize:Py_ssize_t*:size:: -PyUnicode_AsUnicodeCopy:Py_UNICODE*::: -PyUnicode_AsUnicodeCopy:PyObject*:unicode:0: - PyUnicode_GetSize:Py_ssize_t::: PyUnicode_GetSize:PyObject*:unicode:0: diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index a755d2672ae6d..0674ce8cff177 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -240,3 +240,7 @@ Removed * Removed ``PyLong_FromUnicode()``. Please migrate to :c:func:`PyLong_FromUnicodeObject`. (Contributed by Inada Naoki in :issue:`41103`.) + +* Removed ``PyUnicode_AsUnicodeCopy()``. Please use :c:func:`PyUnicode_AsUCS4Copy` or + :c:func:`PyUnicode_AsWideCharString` + (Contributed by Inada Naoki in :issue:`41103`.) diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index c1a8564349b99..88a97a4cb5f71 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -1162,14 +1162,6 @@ PyAPI_FUNC(int) _PyUnicode_IsAlpha( PyAPI_FUNC(PyObject*) _PyUnicode_FormatLong(PyObject *, int, int, int); -/* Create a copy of a unicode string ending with a nul character. Return NULL - and raise a MemoryError exception on memory allocation failure, otherwise - return a new allocated buffer (use PyMem_Free() to free the buffer). 
*/ - -Py_DEPRECATED(3.3) PyAPI_FUNC(Py_UNICODE*) PyUnicode_AsUnicodeCopy( - PyObject *unicode - ); - /* Return an interned Unicode object for an Identifier; may fail if there is no memory.*/ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); diff --git a/Misc/NEWS.d/next/C API/2020-06-29-15-49-36.bpo-41123.wYY4E1.rst b/Misc/NEWS.d/next/C API/2020-06-29-15-49-36.bpo-41123.wYY4E1.rst new file mode 100644 index 0000000000000..74ac45462773e --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-29-15-49-36.bpo-41123.wYY4E1.rst @@ -0,0 +1 @@ +Remove ``PyUnicode_AsUnicodeCopy``. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index c46ba4ae57dc6..8eafdacf55974 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -15862,39 +15862,6 @@ unicode_iter(PyObject *seq) return (PyObject *)it; } -Py_UNICODE* -PyUnicode_AsUnicodeCopy(PyObject *unicode) -{ - Py_UNICODE *u, *copy; - Py_ssize_t len, size; - - if (!PyUnicode_Check(unicode)) { - PyErr_BadArgument(); - return NULL; - } -_Py_COMP_DIAG_PUSH -_Py_COMP_DIAG_IGNORE_DEPR_DECLS - u = PyUnicode_AsUnicodeAndSize(unicode, &len); -_Py_COMP_DIAG_POP - if (u == NULL) - return NULL; - /* Ensure we won't overflow the size. */ - if (len > ((PY_SSIZE_T_MAX / (Py_ssize_t)sizeof(Py_UNICODE)) - 1)) { - PyErr_NoMemory(); - return NULL; - } - size = len + 1; /* copy the null character */ - size *= sizeof(Py_UNICODE); - copy = PyMem_Malloc(size); - if (copy == NULL) { - PyErr_NoMemory(); - return NULL; - } - memcpy(copy, u, size); - return copy; -} - - static int encode_wstr_utf8(wchar_t *wstr, char **str, const char *name) { From webhook-mailer at python.org Tue Jun 30 02:03:23 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 30 Jun 2020 06:03:23 -0000 Subject: [Python-checkins] bpo-36346: Prepare for removing the legacy Unicode C API (AC only). (GH-21223) Message-ID: https://github.com/python/cpython/commit/349f76c6aace5a4a2b57f6b442a532faf0027d6b commit: 349f76c6aace5a4a2b57f6b442a532faf0027d6b branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-30T09:03:15+03:00 summary: bpo-36346: Prepare for removing the legacy Unicode C API (AC only). (GH-21223) files: M Include/cpython/unicodeobject.h M Lib/test/clinic.test M Modules/clinic/_winapi.c.h M Modules/clinic/posixmodule.c.h M Objects/unicodeobject.c M PC/clinic/winreg.c.h M Tools/clinic/clinic.py diff --git a/Include/cpython/unicodeobject.h b/Include/cpython/unicodeobject.h index 88a97a4cb5f71..0f19b2a14bcd0 100644 --- a/Include/cpython/unicodeobject.h +++ b/Include/cpython/unicodeobject.h @@ -11,6 +11,8 @@ /* --- Internal Unicode Operations ---------------------------------------- */ +#define USE_UNICODE_WCHAR_CACHE 1 + /* Since splitting on whitespace is an important use case, and whitespace in most situations is solely ASCII whitespace, we optimize for the common case by using a quick look-up table @@ -1169,4 +1171,7 @@ PyAPI_FUNC(PyObject*) _PyUnicode_FromId(_Py_Identifier*); and where the hash values are equal (i.e. 
a very probable match) */ PyAPI_FUNC(int) _PyUnicode_EQ(PyObject *, PyObject *); +PyAPI_FUNC(int) _PyUnicode_WideCharString_Converter(PyObject *, void *); +PyAPI_FUNC(int) _PyUnicode_WideCharString_Opt_Converter(PyObject *, void *); + PyAPI_FUNC(Py_ssize_t) _PyUnicode_ScanIdentifier(PyObject *); diff --git a/Lib/test/clinic.test b/Lib/test/clinic.test index f2be61355cc97..07e13829d5db9 100644 --- a/Lib/test/clinic.test +++ b/Lib/test/clinic.test @@ -1813,13 +1813,26 @@ test_Py_UNICODE_converter(PyObject *module, PyObject *const *args, Py_ssize_t na const Py_UNICODE *e; Py_ssize_clean_t e_length; - if (!_PyArg_ParseStack(args, nargs, "uuZu#Z#:test_Py_UNICODE_converter", - &a, &b, &c, &d, &d_length, &e, &e_length)) { + if (!_PyArg_ParseStack(args, nargs, "O&O&O&u#Z#:test_Py_UNICODE_converter", + _PyUnicode_WideCharString_Converter, &a, _PyUnicode_WideCharString_Converter, &b, _PyUnicode_WideCharString_Opt_Converter, &c, &d, &d_length, &e, &e_length)) { goto exit; } return_value = test_Py_UNICODE_converter_impl(module, a, b, c, d, d_length, e, e_length); exit: + /* Cleanup for a */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)a); + #endif /* USE_UNICODE_WCHAR_CACHE */ + /* Cleanup for b */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)b); + #endif /* USE_UNICODE_WCHAR_CACHE */ + /* Cleanup for c */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)c); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -1830,7 +1843,7 @@ test_Py_UNICODE_converter_impl(PyObject *module, const Py_UNICODE *a, Py_ssize_clean_t d_length, const Py_UNICODE *e, Py_ssize_clean_t e_length) -/*[clinic end generated code: output=dd0a09a1b772e57b input=064a3b68ad7f04b0]*/ +/*[clinic end generated code: output=ef45e982fedf0b3d input=064a3b68ad7f04b0]*/ /*[clinic input] diff --git a/Modules/clinic/_winapi.c.h b/Modules/clinic/_winapi.c.h index e21f2bc2b6fd6..6022dfe0db4b2 100644 --- a/Modules/clinic/_winapi.c.h +++ b/Modules/clinic/_winapi.c.h @@ -367,13 +367,22 @@ _winapi_CreateProcess(PyObject *module, PyObject *const *args, Py_ssize_t nargs) const Py_UNICODE *current_directory; PyObject *startup_info; - if (!_PyArg_ParseStack(args, nargs, "ZOOOikOZO:CreateProcess", - &application_name, &command_line, &proc_attrs, &thread_attrs, &inherit_handles, &creation_flags, &env_mapping, ¤t_directory, &startup_info)) { + if (!_PyArg_ParseStack(args, nargs, "O&OOOikOO&O:CreateProcess", + _PyUnicode_WideCharString_Opt_Converter, &application_name, &command_line, &proc_attrs, &thread_attrs, &inherit_handles, &creation_flags, &env_mapping, _PyUnicode_WideCharString_Opt_Converter, ¤t_directory, &startup_info)) { goto exit; } return_value = _winapi_CreateProcess_impl(module, application_name, command_line, proc_attrs, thread_attrs, inherit_handles, creation_flags, env_mapping, current_directory, startup_info); exit: + /* Cleanup for application_name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)application_name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + /* Cleanup for current_directory */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)current_directory); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -1097,4 +1106,4 @@ _winapi_GetFileType(PyObject *module, PyObject *const *args, Py_ssize_t nargs, P exit: return return_value; } -/*[clinic end generated code: output=f3897898ea1da99d input=a9049054013a1b77]*/ +/*[clinic end generated code: output=db87076a32fa7abe input=a9049054013a1b77]*/ diff --git a/Modules/clinic/posixmodule.c.h b/Modules/clinic/posixmodule.c.h index 
b691cfbc6edef..6533edfdb47d2 100644 --- a/Modules/clinic/posixmodule.c.h +++ b/Modules/clinic/posixmodule.c.h @@ -1674,12 +1674,28 @@ os_system(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k { PyObject *return_value = NULL; static const char * const _keywords[] = {"command", NULL}; - static _PyArg_Parser _parser = {"u:system", _keywords, 0}; + static _PyArg_Parser _parser = {NULL, _keywords, "system", 0}; + PyObject *argsbuf[1]; const Py_UNICODE *command; long _return_value; - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - &command)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 1, 0, argsbuf); + if (!args) { + goto exit; + } + if (!PyUnicode_Check(args[0])) { + _PyArg_BadArgument("system", "argument 'command'", "str", args[0]); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + command = _PyUnicode_AsUnicode(args[0]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + command = PyUnicode_AsWideCharString(args[0], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (command == NULL) { goto exit; } _return_value = os_system_impl(module, command); @@ -1689,6 +1705,11 @@ os_system(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject *k return_value = PyLong_FromLong(_return_value); exit: + /* Cleanup for command */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)command); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -6998,19 +7019,47 @@ os_startfile(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObject { PyObject *return_value = NULL; static const char * const _keywords[] = {"filepath", "operation", NULL}; - static _PyArg_Parser _parser = {"O&|u:startfile", _keywords, 0}; + static _PyArg_Parser _parser = {NULL, _keywords, "startfile", 0}; + PyObject *argsbuf[2]; + Py_ssize_t noptargs = nargs + (kwnames ? 
PyTuple_GET_SIZE(kwnames) : 0) - 1; path_t filepath = PATH_T_INITIALIZE("startfile", "filepath", 0, 0); const Py_UNICODE *operation = NULL; - if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - path_converter, &filepath, &operation)) { + args = _PyArg_UnpackKeywords(args, nargs, NULL, kwnames, &_parser, 1, 2, 0, argsbuf); + if (!args) { + goto exit; + } + if (!path_converter(args[0], &filepath)) { + goto exit; + } + if (!noptargs) { + goto skip_optional_pos; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("startfile", "argument 'operation'", "str", args[1]); goto exit; } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + operation = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + operation = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (operation == NULL) { + goto exit; + } +skip_optional_pos: return_value = os_startfile_impl(module, &filepath, operation); exit: /* Cleanup for filepath */ path_cleanup(&filepath); + /* Cleanup for operation */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)operation); + #endif /* USE_UNICODE_WCHAR_CACHE */ return return_value; } @@ -8876,4 +8925,4 @@ os_waitstatus_to_exitcode(PyObject *module, PyObject *const *args, Py_ssize_t na #ifndef OS_WAITSTATUS_TO_EXITCODE_METHODDEF #define OS_WAITSTATUS_TO_EXITCODE_METHODDEF #endif /* !defined(OS_WAITSTATUS_TO_EXITCODE_METHODDEF) */ -/*[clinic end generated code: output=d7c1212a94613496 input=a9049054013a1b77]*/ +/*[clinic end generated code: output=ba3d4b35fda2c208 input=a9049054013a1b77]*/ diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index 8eafdacf55974..db3f55e02b98b 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -3273,6 +3273,80 @@ PyUnicode_AsWideCharString(PyObject *unicode, #endif /* HAVE_WCHAR_H */ +int +_PyUnicode_WideCharString_Converter(PyObject *obj, void *ptr) +{ + wchar_t **p = (wchar_t **)ptr; + if (obj == NULL) { +#if !USE_UNICODE_WCHAR_CACHE + PyMem_Free(*p); +#endif /* USE_UNICODE_WCHAR_CACHE */ + *p = NULL; + return 1; + } + if (PyUnicode_Check(obj)) { +#if USE_UNICODE_WCHAR_CACHE +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + *p = (wchar_t *)_PyUnicode_AsUnicode(obj); + if (*p == NULL) { + return 0; + } + return 1; +_Py_COMP_DIAG_POP +#else /* USE_UNICODE_WCHAR_CACHE */ + *p = PyUnicode_AsWideCharString(obj, NULL); + if (*p == NULL) { + return 0; + } + return Py_CLEANUP_SUPPORTED; +#endif /* USE_UNICODE_WCHAR_CACHE */ + } + PyErr_Format(PyExc_TypeError, + "argument must be str, not %.50s", + obj->ob_type->tp_name); + return 0; +} + +int +_PyUnicode_WideCharString_Opt_Converter(PyObject *obj, void *ptr) +{ + wchar_t **p = (wchar_t **)ptr; + if (obj == NULL) { +#if !USE_UNICODE_WCHAR_CACHE + PyMem_Free(*p); +#endif /* USE_UNICODE_WCHAR_CACHE */ + *p = NULL; + return 1; + } + if (obj == Py_None) { + *p = NULL; + return 1; + } + if (PyUnicode_Check(obj)) { +#if USE_UNICODE_WCHAR_CACHE +_Py_COMP_DIAG_PUSH +_Py_COMP_DIAG_IGNORE_DEPR_DECLS + *p = (wchar_t *)_PyUnicode_AsUnicode(obj); + if (*p == NULL) { + return 0; + } + return 1; +_Py_COMP_DIAG_POP +#else /* USE_UNICODE_WCHAR_CACHE */ + *p = PyUnicode_AsWideCharString(obj, NULL); + if (*p == NULL) { + return 0; + } + return Py_CLEANUP_SUPPORTED; +#endif /* USE_UNICODE_WCHAR_CACHE */ + } + PyErr_Format(PyExc_TypeError, + "argument must be str or None, not %.50s", + obj->ob_type->tp_name); + return 0; +} + PyObject * PyUnicode_FromOrdinal(int ordinal) { 
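
The converters added above follow the standard "O&" cleanup protocol: by returning Py_CLEANUP_SUPPORTED they ask the argument parser to call them a second time with a NULL object if parsing later fails, so the wide-char buffer obtained from PyUnicode_AsWideCharString() can be released. Below is a rough, self-contained sketch of that protocol using only public C API; the names wide_string_converter and text_width are invented for illustration and are not part of this patch.

#define PY_SSIZE_T_CLEAN
#include <Python.h>
#include <wchar.h>

/* Hypothetical converter: accepts str, hands back a wchar_t* allocated with
   PyUnicode_AsWideCharString().  Same shape as the private converters added
   above, but restricted to public API. */
static int
wide_string_converter(PyObject *obj, void *ptr)
{
    wchar_t **p = (wchar_t **)ptr;
    if (obj == NULL) {                  /* second call: the cleanup pass */
        PyMem_Free(*p);
        *p = NULL;
        return 1;
    }
    if (!PyUnicode_Check(obj)) {
        PyErr_Format(PyExc_TypeError, "argument must be str, not %.50s",
                     Py_TYPE(obj)->tp_name);
        return 0;
    }
    *p = PyUnicode_AsWideCharString(obj, NULL);
    if (*p == NULL) {
        return 0;
    }
    return Py_CLEANUP_SUPPORTED;        /* request the cleanup pass on failure */
}

/* Hypothetical module function consuming the converter through "O&". */
static PyObject *
text_width(PyObject *module, PyObject *args)
{
    wchar_t *text = NULL;
    if (!PyArg_ParseTuple(args, "O&", wide_string_converter, &text)) {
        return NULL;    /* a failed parse already freed any converted value */
    }
    Py_ssize_t n = (Py_ssize_t)wcslen(text);
    PyMem_Free(text);   /* success path: the caller owns the buffer */
    return PyLong_FromSsize_t(n);
}

On builds where USE_UNICODE_WCHAR_CACHE is enabled, the private converters in the patch return the interpreter's cached wchar_t representation instead of a fresh allocation, which is why the generated "Cleanup for ..." blocks are compiled out in that configuration.
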
diff --git a/PC/clinic/winreg.c.h b/PC/clinic/winreg.c.h index 5f37fcda0a9ab..5c97eaeee9e27 100644 --- a/PC/clinic/winreg.c.h +++ b/PC/clinic/winreg.c.h @@ -152,8 +152,30 @@ winreg_ConnectRegistry(PyObject *module, PyObject *const *args, Py_ssize_t nargs HKEY key; HKEY _return_value; - if (!_PyArg_ParseStack(args, nargs, "ZO&:ConnectRegistry", - &computer_name, clinic_HKEY_converter, &key)) { + if (!_PyArg_CheckPositional("ConnectRegistry", nargs, 2, 2)) { + goto exit; + } + if (args[0] == Py_None) { + computer_name = NULL; + } + else if (PyUnicode_Check(args[0])) { + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + computer_name = _PyUnicode_AsUnicode(args[0]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + computer_name = PyUnicode_AsWideCharString(args[0], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (computer_name == NULL) { + goto exit; + } + } + else { + _PyArg_BadArgument("ConnectRegistry", "argument 1", "str or None", args[0]); + goto exit; + } + if (!clinic_HKEY_converter(args[1], &key)) { goto exit; } _return_value = winreg_ConnectRegistry_impl(module, computer_name, key); @@ -163,6 +185,11 @@ winreg_ConnectRegistry(PyObject *module, PyObject *const *args, Py_ssize_t nargs return_value = PyHKEY_FromHKEY(_return_value); exit: + /* Cleanup for computer_name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)computer_name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -199,8 +226,30 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) const Py_UNICODE *sub_key; HKEY _return_value; - if (!_PyArg_ParseStack(args, nargs, "O&Z:CreateKey", - clinic_HKEY_converter, &key, &sub_key)) { + if (!_PyArg_CheckPositional("CreateKey", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (args[1] == Py_None) { + sub_key = NULL; + } + else if (PyUnicode_Check(args[1])) { + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + sub_key = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + sub_key = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (sub_key == NULL) { + goto exit; + } + } + else { + _PyArg_BadArgument("CreateKey", "argument 2", "str or None", args[1]); goto exit; } _return_value = winreg_CreateKey_impl(module, key, sub_key); @@ -210,6 +259,11 @@ winreg_CreateKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) return_value = PyHKEY_FromHKEY(_return_value); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -251,7 +305,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py { PyObject *return_value = NULL; static const char * const _keywords[] = {"key", "sub_key", "reserved", "access", NULL}; - static _PyArg_Parser _parser = {"O&Z|ii:CreateKeyEx", _keywords, 0}; + static _PyArg_Parser _parser = {"O&O&|ii:CreateKeyEx", _keywords, 0}; HKEY key; const Py_UNICODE *sub_key; int reserved = 0; @@ -259,7 +313,7 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py HKEY _return_value; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - clinic_HKEY_converter, &key, &sub_key, &reserved, &access)) { + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Opt_Converter, &sub_key, &reserved, &access)) { goto exit; } _return_value = 
winreg_CreateKeyEx_impl(module, key, sub_key, reserved, access); @@ -269,6 +323,11 @@ winreg_CreateKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py return_value = PyHKEY_FromHKEY(_return_value); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -303,13 +362,35 @@ winreg_DeleteKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) HKEY key; const Py_UNICODE *sub_key; - if (!_PyArg_ParseStack(args, nargs, "O&u:DeleteKey", - clinic_HKEY_converter, &key, &sub_key)) { + if (!_PyArg_CheckPositional("DeleteKey", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("DeleteKey", "argument 2", "str", args[1]); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + sub_key = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + sub_key = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (sub_key == NULL) { goto exit; } return_value = winreg_DeleteKey_impl(module, key, sub_key); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -351,19 +432,24 @@ winreg_DeleteKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, Py { PyObject *return_value = NULL; static const char * const _keywords[] = {"key", "sub_key", "access", "reserved", NULL}; - static _PyArg_Parser _parser = {"O&u|ii:DeleteKeyEx", _keywords, 0}; + static _PyArg_Parser _parser = {"O&O&|ii:DeleteKeyEx", _keywords, 0}; HKEY key; const Py_UNICODE *sub_key; REGSAM access = KEY_WOW64_64KEY; int reserved = 0; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - clinic_HKEY_converter, &key, &sub_key, &access, &reserved)) { + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Converter, &sub_key, &access, &reserved)) { goto exit; } return_value = winreg_DeleteKeyEx_impl(module, key, sub_key, access, reserved); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -391,13 +477,40 @@ winreg_DeleteValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) HKEY key; const Py_UNICODE *value; - if (!_PyArg_ParseStack(args, nargs, "O&Z:DeleteValue", - clinic_HKEY_converter, &key, &value)) { + if (!_PyArg_CheckPositional("DeleteValue", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (args[1] == Py_None) { + value = NULL; + } + else if (PyUnicode_Check(args[1])) { + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + value = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + value = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (value == NULL) { + goto exit; + } + } + else { + _PyArg_BadArgument("DeleteValue", "argument 2", "str or None", args[1]); goto exit; } return_value = winreg_DeleteValue_impl(module, key, value); exit: + /* Cleanup for value */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)value); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -517,12 +630,29 @@ winreg_ExpandEnvironmentStrings(PyObject *module, PyObject *arg) PyObject *return_value = 
NULL; const Py_UNICODE *string; - if (!PyArg_Parse(arg, "u:ExpandEnvironmentStrings", &string)) { + if (!PyUnicode_Check(arg)) { + _PyArg_BadArgument("ExpandEnvironmentStrings", "argument", "str", arg); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + string = _PyUnicode_AsUnicode(arg); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + string = PyUnicode_AsWideCharString(arg, NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (string == NULL) { goto exit; } return_value = winreg_ExpandEnvironmentStrings_impl(module, string); exit: + /* Cleanup for string */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)string); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -609,13 +739,54 @@ winreg_LoadKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) const Py_UNICODE *sub_key; const Py_UNICODE *file_name; - if (!_PyArg_ParseStack(args, nargs, "O&uu:LoadKey", - clinic_HKEY_converter, &key, &sub_key, &file_name)) { + if (!_PyArg_CheckPositional("LoadKey", nargs, 3, 3)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("LoadKey", "argument 2", "str", args[1]); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + sub_key = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + sub_key = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (sub_key == NULL) { + goto exit; + } + if (!PyUnicode_Check(args[2])) { + _PyArg_BadArgument("LoadKey", "argument 3", "str", args[2]); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + file_name = _PyUnicode_AsUnicode(args[2]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + file_name = PyUnicode_AsWideCharString(args[2], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (file_name == NULL) { goto exit; } return_value = winreg_LoadKey_impl(module, key, sub_key, file_name); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + /* Cleanup for file_name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)file_name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -650,7 +821,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje { PyObject *return_value = NULL; static const char * const _keywords[] = {"key", "sub_key", "reserved", "access", NULL}; - static _PyArg_Parser _parser = {"O&Z|ii:OpenKey", _keywords, 0}; + static _PyArg_Parser _parser = {"O&O&|ii:OpenKey", _keywords, 0}; HKEY key; const Py_UNICODE *sub_key; int reserved = 0; @@ -658,7 +829,7 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje HKEY _return_value; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - clinic_HKEY_converter, &key, &sub_key, &reserved, &access)) { + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Opt_Converter, &sub_key, &reserved, &access)) { goto exit; } _return_value = winreg_OpenKey_impl(module, key, sub_key, reserved, access); @@ -668,6 +839,11 @@ winreg_OpenKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyObje return_value = PyHKEY_FromHKEY(_return_value); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return 
return_value; } @@ -702,7 +878,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb { PyObject *return_value = NULL; static const char * const _keywords[] = {"key", "sub_key", "reserved", "access", NULL}; - static _PyArg_Parser _parser = {"O&Z|ii:OpenKeyEx", _keywords, 0}; + static _PyArg_Parser _parser = {"O&O&|ii:OpenKeyEx", _keywords, 0}; HKEY key; const Py_UNICODE *sub_key; int reserved = 0; @@ -710,7 +886,7 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb HKEY _return_value; if (!_PyArg_ParseStackAndKeywords(args, nargs, kwnames, &_parser, - clinic_HKEY_converter, &key, &sub_key, &reserved, &access)) { + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Opt_Converter, &sub_key, &reserved, &access)) { goto exit; } _return_value = winreg_OpenKeyEx_impl(module, key, sub_key, reserved, access); @@ -720,6 +896,11 @@ winreg_OpenKeyEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs, PyOb return_value = PyHKEY_FromHKEY(_return_value); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -792,13 +973,40 @@ winreg_QueryValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) HKEY key; const Py_UNICODE *sub_key; - if (!_PyArg_ParseStack(args, nargs, "O&Z:QueryValue", - clinic_HKEY_converter, &key, &sub_key)) { + if (!_PyArg_CheckPositional("QueryValue", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (args[1] == Py_None) { + sub_key = NULL; + } + else if (PyUnicode_Check(args[1])) { + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + sub_key = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + sub_key = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (sub_key == NULL) { + goto exit; + } + } + else { + _PyArg_BadArgument("QueryValue", "argument 2", "str or None", args[1]); goto exit; } return_value = winreg_QueryValue_impl(module, key, sub_key); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -831,13 +1039,40 @@ winreg_QueryValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) HKEY key; const Py_UNICODE *name; - if (!_PyArg_ParseStack(args, nargs, "O&Z:QueryValueEx", - clinic_HKEY_converter, &key, &name)) { + if (!_PyArg_CheckPositional("QueryValueEx", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (args[1] == Py_None) { + name = NULL; + } + else if (PyUnicode_Check(args[1])) { + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + name = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + name = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (name == NULL) { + goto exit; + } + } + else { + _PyArg_BadArgument("QueryValueEx", "argument 2", "str or None", args[1]); goto exit; } return_value = winreg_QueryValueEx_impl(module, key, name); exit: + /* Cleanup for name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -875,13 +1110,35 @@ winreg_SaveKey(PyObject *module, PyObject *const *args, Py_ssize_t nargs) HKEY key; const Py_UNICODE *file_name; - if 
(!_PyArg_ParseStack(args, nargs, "O&u:SaveKey", - clinic_HKEY_converter, &key, &file_name)) { + if (!_PyArg_CheckPositional("SaveKey", nargs, 2, 2)) { + goto exit; + } + if (!clinic_HKEY_converter(args[0], &key)) { + goto exit; + } + if (!PyUnicode_Check(args[1])) { + _PyArg_BadArgument("SaveKey", "argument 2", "str", args[1]); + goto exit; + } + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + file_name = _PyUnicode_AsUnicode(args[1]); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + file_name = PyUnicode_AsWideCharString(args[1], NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if (file_name == NULL) { goto exit; } return_value = winreg_SaveKey_impl(module, key, file_name); exit: + /* Cleanup for file_name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)file_name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -929,13 +1186,18 @@ winreg_SetValue(PyObject *module, PyObject *const *args, Py_ssize_t nargs) const Py_UNICODE *value; Py_ssize_clean_t value_length; - if (!_PyArg_ParseStack(args, nargs, "O&Zku#:SetValue", - clinic_HKEY_converter, &key, &sub_key, &type, &value, &value_length)) { + if (!_PyArg_ParseStack(args, nargs, "O&O&ku#:SetValue", + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Opt_Converter, &sub_key, &type, &value, &value_length)) { goto exit; } return_value = winreg_SetValue_impl(module, key, sub_key, type, value, value_length); exit: + /* Cleanup for sub_key */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)sub_key); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -1000,13 +1262,18 @@ winreg_SetValueEx(PyObject *module, PyObject *const *args, Py_ssize_t nargs) DWORD type; PyObject *value; - if (!_PyArg_ParseStack(args, nargs, "O&ZOkO:SetValueEx", - clinic_HKEY_converter, &key, &value_name, &reserved, &type, &value)) { + if (!_PyArg_ParseStack(args, nargs, "O&O&OkO:SetValueEx", + clinic_HKEY_converter, &key, _PyUnicode_WideCharString_Opt_Converter, &value_name, &reserved, &type, &value)) { goto exit; } return_value = winreg_SetValueEx_impl(module, key, value_name, reserved, type, value); exit: + /* Cleanup for value_name */ + #if !USE_UNICODE_WCHAR_CACHE + PyMem_Free((void *)value_name); + #endif /* USE_UNICODE_WCHAR_CACHE */ + return return_value; } @@ -1111,4 +1378,4 @@ winreg_QueryReflectionKey(PyObject *module, PyObject *arg) exit: return return_value; } -/*[clinic end generated code: output=f4f996d40d06f14c input=a9049054013a1b77]*/ +/*[clinic end generated code: output=fa5f21ea6a75d0e9 input=a9049054013a1b77]*/ diff --git a/Tools/clinic/clinic.py b/Tools/clinic/clinic.py index b1bf7826ebf9f..3a9f4c228c22b 100755 --- a/Tools/clinic/clinic.py +++ b/Tools/clinic/clinic.py @@ -3374,20 +3374,81 @@ def parse_arg(self, argname, displayname): displayname=displayname) return super().parse_arg(argname, displayname) + at add_legacy_c_converter('u') @add_legacy_c_converter('u#', zeroes=True) @add_legacy_c_converter('Z', accept={str, NoneType}) @add_legacy_c_converter('Z#', accept={str, NoneType}, zeroes=True) class Py_UNICODE_converter(CConverter): type = 'const Py_UNICODE *' default_type = (str, Null, NoneType) - format_unit = 'u' def converter_init(self, *, accept={str}, zeroes=False): format_unit = 'Z' if accept=={str, NoneType} else 'u' if zeroes: format_unit += '#' self.length = True - self.format_unit = format_unit + self.format_unit = format_unit + else: + self.accept = accept + if accept == {str}: + self.converter = '_PyUnicode_WideCharString_Converter' + elif 
accept == {str, NoneType}: + self.converter = '_PyUnicode_WideCharString_Opt_Converter' + else: + fail("Py_UNICODE_converter: illegal 'accept' argument " + repr(accept)) + + def cleanup(self): + if not self.length: + return """\ +#if !USE_UNICODE_WCHAR_CACHE +PyMem_Free((void *){name}); +#endif /* USE_UNICODE_WCHAR_CACHE */ +""".format(name=self.name) + + def parse_arg(self, argname, argnum): + if not self.length: + if self.accept == {str}: + return """ + if (!PyUnicode_Check({argname})) {{{{ + _PyArg_BadArgument("{{name}}", {argnum}, "str", {argname}); + goto exit; + }}}} + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + {paramname} = _PyUnicode_AsUnicode({argname}); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + {paramname} = PyUnicode_AsWideCharString({argname}, NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if ({paramname} == NULL) {{{{ + goto exit; + }}}} + """.format(argname=argname, paramname=self.name, argnum=argnum) + elif self.accept == {str, NoneType}: + return """ + if ({argname} == Py_None) {{{{ + {paramname} = NULL; + }}}} + else if (PyUnicode_Check({argname})) {{{{ + #if USE_UNICODE_WCHAR_CACHE + _Py_COMP_DIAG_PUSH + _Py_COMP_DIAG_IGNORE_DEPR_DECLS + {paramname} = _PyUnicode_AsUnicode({argname}); + _Py_COMP_DIAG_POP + #else /* USE_UNICODE_WCHAR_CACHE */ + {paramname} = PyUnicode_AsWideCharString({argname}, NULL); + #endif /* USE_UNICODE_WCHAR_CACHE */ + if ({paramname} == NULL) {{{{ + goto exit; + }}}} + }}}} + else {{{{ + _PyArg_BadArgument("{{name}}", {argnum}, "str or None", {argname}); + goto exit; + }}}} + """.format(argname=argname, paramname=self.name, argnum=argnum) + return super().parse_arg(argname, argnum) @add_legacy_c_converter('s*', accept={str, buffer}) @add_legacy_c_converter('z*', accept={str, buffer, NoneType}) From webhook-mailer at python.org Tue Jun 30 02:27:04 2020 From: webhook-mailer at python.org (Inada Naoki) Date: Tue, 30 Jun 2020 06:27:04 -0000 Subject: [Python-checkins] bpo-36346: Raise DeprecationWarning when creating legacy Unicode (GH-20933) Message-ID: https://github.com/python/cpython/commit/038dd0f79dc89566b01ba66a5a018266b2917a19 commit: 038dd0f79dc89566b01ba66a5a018266b2917a19 branch: master author: Inada Naoki committer: GitHub date: 2020-06-30T15:26:56+09:00 summary: bpo-36346: Raise DeprecationWarning when creating legacy Unicode (GH-20933) files: A Misc/NEWS.d/next/C API/2020-06-17-20-31-12.bpo-36346.mwIyxi.rst M Doc/whatsnew/3.10.rst M Lib/test/test_unicode.py M Objects/unicodeobject.c diff --git a/Doc/whatsnew/3.10.rst b/Doc/whatsnew/3.10.rst index 0674ce8cff177..a3b53ba48e9b7 100644 --- a/Doc/whatsnew/3.10.rst +++ b/Doc/whatsnew/3.10.rst @@ -213,6 +213,11 @@ Porting to Python 3.10 for historical reason. It is no longer allowed. (Contributed by Victor Stinner in :issue:`40839`.) +* ``PyUnicode_FromUnicode(NULL, size)`` and ``PyUnicode_FromStringAndSize(NULL, size)`` + raise ``DeprecationWarning`` now. Use :c:func:`PyUnicode_New` to allocate + Unicode object without initial data. + (Contributed by Inada Naoki in :issue:`36346`.) + Removed ------- diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py index 6e397161fd98d..59697935fe5cd 100644 --- a/Lib/test/test_unicode.py +++ b/Lib/test/test_unicode.py @@ -725,7 +725,9 @@ def test_isidentifier_legacy(self): import _testcapi u = '???????' 
self.assertTrue(u.isidentifier()) - self.assertTrue(_testcapi.unicode_legacy_string(u).isidentifier()) + with support.check_warnings(): + warnings.simplefilter('ignore', DeprecationWarning) + self.assertTrue(_testcapi.unicode_legacy_string(u).isidentifier()) def test_isprintable(self): self.assertTrue("".isprintable()) diff --git a/Misc/NEWS.d/next/C API/2020-06-17-20-31-12.bpo-36346.mwIyxi.rst b/Misc/NEWS.d/next/C API/2020-06-17-20-31-12.bpo-36346.mwIyxi.rst new file mode 100644 index 0000000000000..9b0400399beb9 --- /dev/null +++ b/Misc/NEWS.d/next/C API/2020-06-17-20-31-12.bpo-36346.mwIyxi.rst @@ -0,0 +1,2 @@ +Raises DeprecationWarning for ``PyUnicode_FromUnicode(NULL, size)`` and +``PyUnicode_FromStringAndSize(NULL, size)`` with ``size > 0``. diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c index db3f55e02b98b..fe46de2ae4743 100644 --- a/Objects/unicodeobject.c +++ b/Objects/unicodeobject.c @@ -2179,8 +2179,16 @@ unicode_char(Py_UCS4 ch) PyObject * PyUnicode_FromUnicode(const Py_UNICODE *u, Py_ssize_t size) { - if (u == NULL) + if (u == NULL) { + if (size > 0) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "PyUnicode_FromUnicode(NULL, size) is deprecated; " + "use PyUnicode_New() instead", 1) < 0) { + return NULL; + } + } return (PyObject*)_PyUnicode_New(size); + } if (size < 0) { PyErr_BadInternalCall(); @@ -2266,10 +2274,19 @@ PyUnicode_FromStringAndSize(const char *u, Py_ssize_t size) "Negative size passed to PyUnicode_FromStringAndSize"); return NULL; } - if (u != NULL) + if (u != NULL) { return PyUnicode_DecodeUTF8Stateful(u, size, NULL, NULL); - else + } + else { + if (size > 0) { + if (PyErr_WarnEx(PyExc_DeprecationWarning, + "PyUnicode_FromStringAndSize(NULL, size) is deprecated; " + "use PyUnicode_New() instead", 1) < 0) { + return NULL; + } + } return (PyObject *)_PyUnicode_New(size); + } } PyObject * From webhook-mailer at python.org Tue Jun 30 02:33:30 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 30 Jun 2020 06:33:30 -0000 Subject: [Python-checkins] bpo-41158: IDLE: rewrite the code for handling file encoding (GH-21215) Message-ID: https://github.com/python/cpython/commit/694d31e714074176f0c324f95948b75dc768c091 commit: 694d31e714074176f0c324f95948b75dc768c091 branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-30T09:33:22+03:00 summary: bpo-41158: IDLE: rewrite the code for handling file encoding (GH-21215) files: M Lib/idlelib/iomenu.py diff --git a/Lib/idlelib/iomenu.py b/Lib/idlelib/iomenu.py index 7f3f656ee2874..7641d866858a1 100644 --- a/Lib/idlelib/iomenu.py +++ b/Lib/idlelib/iomenu.py @@ -1,10 +1,8 @@ -import codecs -from codecs import BOM_UTF8 import os -import re import shlex import sys import tempfile +import tokenize import tkinter.filedialog as tkFileDialog import tkinter.messagebox as tkMessageBox @@ -20,49 +18,6 @@ errors = 'surrogateescape' -coding_re = re.compile(r'^[ \t\f]*#.*?coding[:=][ \t]*([-\w.]+)', re.ASCII) -blank_re = re.compile(r'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) - -def coding_spec(data): - """Return the encoding declaration according to PEP 263. - - When checking encoded data, only the first two lines should be passed - in to avoid a UnicodeDecodeError if the rest of the data is not unicode. - The first two lines would contain the encoding specification. - - Raise a LookupError if the encoding is declared but unknown. - """ - if isinstance(data, bytes): - # This encoding might be wrong. 
However, the coding - # spec must be ASCII-only, so any non-ASCII characters - # around here will be ignored. Decoding to Latin-1 should - # never fail (except for memory outage) - lines = data.decode('iso-8859-1') - else: - lines = data - # consider only the first two lines - if '\n' in lines: - lst = lines.split('\n', 2)[:2] - elif '\r' in lines: - lst = lines.split('\r', 2)[:2] - else: - lst = [lines] - for line in lst: - match = coding_re.match(line) - if match is not None: - break - if not blank_re.match(line): - return None - else: - return None - name = match.group(1) - try: - codecs.lookup(name) - except LookupError: - # The standard encoding error does not indicate the encoding - raise LookupError("Unknown encoding: "+name) - return name - class IOBinding: # One instance per editor Window so methods know which to save, close. @@ -78,7 +33,7 @@ def __init__(self, editwin): self.save_as) self.__id_savecopy = self.text.bind("<>", self.save_a_copy) - self.fileencoding = None + self.fileencoding = 'utf-8' self.__id_print = self.text.bind("<>", self.print_window) def close(self): @@ -165,34 +120,44 @@ def open(self, event=None, editFile=None): self.text.focus_set() return "break" - eol = r"(\r\n)|\n|\r" # \r\n (Windows), \n (UNIX), or \r (Mac) - eol_re = re.compile(eol) eol_convention = os.linesep # default def loadfile(self, filename): try: - # open the file in binary mode so that we can handle - # end-of-line convention ourselves. - with open(filename, 'rb') as f: - two_lines = f.readline() + f.readline() - f.seek(0) - bytes = f.read() - except OSError as msg: - tkMessageBox.showerror("I/O Error", str(msg), parent=self.text) + try: + with tokenize.open(filename) as f: + chars = f.read() + fileencoding = f.encoding + eol_convention = f.newlines + converted = False + except (UnicodeDecodeError, SyntaxError): + # Wait for the editor window to appear + self.editwin.text.update() + enc = askstring( + "Specify file encoding", + "The file's encoding is invalid for Python 3.x.\n" + "IDLE will convert it to UTF-8.\n" + "What is the current encoding of the file?", + initialvalue='utf-8', + parent=self.editwin.text) + with open(filename, encoding=enc) as f: + chars = f.read() + fileencoding = f.encoding + eol_convention = f.newlines + converted = True + except OSError as err: + tkMessageBox.showerror("I/O Error", str(err), parent=self.text) return False - chars, converted = self._decode(two_lines, bytes) - if chars is None: + except UnicodeDecodeError: tkMessageBox.showerror("Decoding Error", "File %s\nFailed to Decode" % filename, parent=self.text) return False - # We now convert all end-of-lines to '\n's - firsteol = self.eol_re.search(chars) - if firsteol: - self.eol_convention = firsteol.group(0) - chars = self.eol_re.sub(r"\n", chars) + self.text.delete("1.0", "end") self.set_filename(None) + self.fileencoding = fileencoding + self.eol_convention = eol_convention self.text.insert("1.0", chars) self.reset_undo() self.set_filename(filename) @@ -205,74 +170,6 @@ def loadfile(self, filename): self.updaterecentfileslist(filename) return True - def _decode(self, two_lines, bytes): - "Create a Unicode string." - chars = None - # Check presence of a UTF-8 signature first - if bytes.startswith(BOM_UTF8): - try: - chars = bytes[3:].decode("utf-8") - except UnicodeDecodeError: - # has UTF-8 signature, but fails to decode... 
- return None, False - else: - # Indicates that this file originally had a BOM - self.fileencoding = 'BOM' - return chars, False - # Next look for coding specification - try: - enc = coding_spec(two_lines) - except LookupError as name: - tkMessageBox.showerror( - title="Error loading the file", - message="The encoding '%s' is not known to this Python "\ - "installation. The file may not display correctly" % name, - parent = self.text) - enc = None - except UnicodeDecodeError: - return None, False - if enc: - try: - chars = str(bytes, enc) - self.fileencoding = enc - return chars, False - except UnicodeDecodeError: - pass - # Try ascii: - try: - chars = str(bytes, 'ascii') - self.fileencoding = None - return chars, False - except UnicodeDecodeError: - pass - # Try utf-8: - try: - chars = str(bytes, 'utf-8') - self.fileencoding = 'utf-8' - return chars, False - except UnicodeDecodeError: - pass - # Finally, try the locale's encoding. This is deprecated; - # the user should declare a non-ASCII encoding - try: - # Wait for the editor window to appear - self.editwin.text.update() - enc = askstring( - "Specify file encoding", - "The file's encoding is invalid for Python 3.x.\n" - "IDLE will convert it to UTF-8.\n" - "What is the current encoding of the file?", - initialvalue = encoding, - parent = self.editwin.text) - - if enc: - chars = str(bytes, enc) - self.fileencoding = None - return chars, True - except (UnicodeDecodeError, LookupError): - pass - return None, False # None on failure - def maybesave(self): if self.get_saved(): return "yes" @@ -360,38 +257,30 @@ def encode(self, chars): # text to us. Don't try to guess further. return chars # Preserve a BOM that might have been present on opening - if self.fileencoding == 'BOM': - return BOM_UTF8 + chars.encode("utf-8") + if self.fileencoding == 'utf-8-sig': + return chars.encode('utf-8-sig') # See whether there is anything non-ASCII in it. # If not, no need to figure out the encoding. 
try: return chars.encode('ascii') - except UnicodeError: + except UnicodeEncodeError: pass # Check if there is an encoding declared try: - # a string, let coding_spec slice it to the first two lines - enc = coding_spec(chars) - failed = None - except LookupError as msg: - failed = msg - enc = None - else: - if not enc: - # PEP 3120: default source encoding is UTF-8 - enc = 'utf-8' - if enc: - try: - return chars.encode(enc) - except UnicodeError: - failed = "Invalid encoding '%s'" % enc + encoded = chars.encode('ascii', 'replace') + enc, _ = tokenize.detect_encoding(io.BytesIO(encoded).readline) + return chars.encode(enc) + except SyntaxError as err: + failed = str(err) + except UnicodeEncodeError: + failed = "Invalid encoding '%s'" % enc tkMessageBox.showerror( "I/O Error", "%s.\nSaving as UTF-8" % failed, - parent = self.text) + parent=self.text) # Fallback: save as UTF-8, with BOM - ignoring the incorrect # declared encoding - return BOM_UTF8 + chars.encode("utf-8") + return chars.encode('utf-8-sig') def print_window(self, event): confirm = tkMessageBox.askokcancel( From webhook-mailer at python.org Tue Jun 30 04:49:18 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 30 Jun 2020 08:49:18 -0000 Subject: [Python-checkins] bpo-41123: Remove PyUnicode_AsUnicodeCopy in 3.10 (GH-21227) Message-ID: https://github.com/python/cpython/commit/41d6e3fbb8bcfd41db37782523caac47e7c8ad23 commit: 41d6e3fbb8bcfd41db37782523caac47e7c8ad23 branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-30T01:49:09-07:00 summary: bpo-41123: Remove PyUnicode_AsUnicodeCopy in 3.10 (GH-21227) (cherry picked from commit 2ea6a9928e4fa135888cc8f4733c28d93e642301) Co-authored-by: Inada Naoki files: M Doc/c-api/unicode.rst diff --git a/Doc/c-api/unicode.rst b/Doc/c-api/unicode.rst index 2bf4a0f56bc8a..b261efe2ba477 100644 --- a/Doc/c-api/unicode.rst +++ b/Doc/c-api/unicode.rst @@ -700,6 +700,8 @@ Extension modules can continue using them, as they will not be removed in Python :c:func:`PyUnicode_AsWideChar`, :c:func:`PyUnicode_ReadChar` or similar new APIs. + .. deprecated-removed:: 3.3 3.10 + .. c:function:: PyObject* PyUnicode_TransformDecimalToASCII(Py_UNICODE *s, Py_ssize_t size) From webhook-mailer at python.org Tue Jun 30 04:56:09 2020 From: webhook-mailer at python.org (Serhiy Storchaka) Date: Tue, 30 Jun 2020 08:56:09 -0000 Subject: [Python-checkins] bpo-41142: Add support of non-ASCII paths for CAB files. (GH-21195) Message-ID: https://github.com/python/cpython/commit/ba67d7386edf20bcc0f878a518de0894cb574e9f commit: ba67d7386edf20bcc0f878a518de0894cb574e9f branch: master author: Serhiy Storchaka committer: GitHub date: 2020-06-30T11:56:03+03:00 summary: bpo-41142: Add support of non-ASCII paths for CAB files. (GH-21195) * The path to the CAB file can be non-ASCII. * Paths of added files can be non-ASCII. 
files: A Misc/NEWS.d/next/Windows/2020-06-28-12-40-41.bpo-41142.jpZzzh.rst M Lib/test/test_msilib.py M PC/_msi.c diff --git a/Lib/test/test_msilib.py b/Lib/test/test_msilib.py index 4a233c3784e51..743bea7c14d0e 100644 --- a/Lib/test/test_msilib.py +++ b/Lib/test/test_msilib.py @@ -112,6 +112,16 @@ def test_getproperty_uninitialized_var(self): with self.assertRaises(msilib.MSIError): si.GetProperty(-1) + def test_FCICreate(self): + filepath = TESTFN + '.txt' + cabpath = TESTFN + '.cab' + self.addCleanup(unlink, filepath) + with open(filepath, 'wb'): + pass + self.addCleanup(unlink, cabpath) + msilib.FCICreate(cabpath, [(filepath, 'test.txt')]) + self.assertTrue(os.path.isfile(cabpath)) + class Test_make_id(unittest.TestCase): #http://msdn.microsoft.com/en-us/library/aa369212(v=vs.85).aspx diff --git a/Misc/NEWS.d/next/Windows/2020-06-28-12-40-41.bpo-41142.jpZzzh.rst b/Misc/NEWS.d/next/Windows/2020-06-28-12-40-41.bpo-41142.jpZzzh.rst new file mode 100644 index 0000000000000..91406da7a2544 --- /dev/null +++ b/Misc/NEWS.d/next/Windows/2020-06-28-12-40-41.bpo-41142.jpZzzh.rst @@ -0,0 +1,2 @@ +:mod:`msilib` now supports creating CAB files with non-ASCII file path and +adding files with non-ASCII file path to them. diff --git a/PC/_msi.c b/PC/_msi.c index 58c1cfd997bf8..60a0c3aebb1e7 100644 --- a/PC/_msi.c +++ b/PC/_msi.c @@ -41,21 +41,50 @@ uuidcreate(PyObject* obj, PyObject*args) } +/* Helper for converting file names from UTF-8 to wchat_t*. */ +static wchar_t * +utf8_to_wchar(const char *s, int *err) +{ + PyObject *obj = PyUnicode_FromString(s); + if (obj == NULL) { + if (PyErr_ExceptionMatches(PyExc_MemoryError)) { + *err = ENOMEM; + } + else { + *err = EINVAL; + } + PyErr_Clear(); + return NULL; + } + wchar_t *ws = PyUnicode_AsWideCharString(obj, NULL); + if (ws == NULL) { + *err = ENOMEM; + PyErr_Clear(); + } + Py_DECREF(obj); + return ws; +} + /* FCI callback functions */ static FNFCIALLOC(cb_alloc) { - return malloc(cb); + return PyMem_RawMalloc(cb); } static FNFCIFREE(cb_free) { - free(memory); + PyMem_RawFree(memory); } static FNFCIOPEN(cb_open) { - int result = _open(pszFile, oflag | O_NOINHERIT, pmode); + wchar_t *ws = utf8_to_wchar(pszFile, err); + if (ws == NULL) { + return -1; + } + int result = _wopen(ws, oflag | O_NOINHERIT, pmode); + PyMem_Free(ws); if (result == -1) *err = errno; return result; @@ -95,7 +124,12 @@ static FNFCISEEK(cb_seek) static FNFCIDELETE(cb_delete) { - int result = remove(pszFile); + wchar_t *ws = utf8_to_wchar(pszFile, err); + if (ws == NULL) { + return -1; + } + int result = _wremove(ws); + PyMem_Free(ws); if (result != 0) *err = errno; return result; @@ -159,15 +193,22 @@ static FNFCIGETOPENINFO(cb_getopeninfo) FILETIME filetime; HANDLE handle; + wchar_t *ws = utf8_to_wchar(pszName, err); + if (ws == NULL) { + return -1; + } + /* Need Win32 handle to get time stamps */ - handle = CreateFile(pszName, GENERIC_READ, FILE_SHARE_READ, NULL, + handle = CreateFileW(ws, GENERIC_READ, FILE_SHARE_READ, NULL, OPEN_EXISTING, FILE_ATTRIBUTE_NORMAL, NULL); - if (handle == INVALID_HANDLE_VALUE) + if (handle == INVALID_HANDLE_VALUE) { + PyMem_Free(ws); return -1; + } - if (GetFileInformationByHandle(handle, &bhfi) == FALSE) - { + if (GetFileInformationByHandle(handle, &bhfi) == FALSE) { CloseHandle(handle); + PyMem_Free(ws); return -1; } @@ -179,7 +220,9 @@ static FNFCIGETOPENINFO(cb_getopeninfo) CloseHandle(handle); - return _open(pszName, _O_RDONLY | _O_BINARY | O_NOINHERIT); + int result = _wopen(ws, _O_RDONLY | _O_BINARY | O_NOINHERIT); + PyMem_Free(ws); + return 
result; } static PyObject* fcicreate(PyObject* obj, PyObject* args) @@ -212,7 +255,7 @@ static PyObject* fcicreate(PyObject* obj, PyObject* args) ccab.setID = 0; ccab.szDisk[0] = '\0'; - for (i = 0, p = cabname; *p; p = CharNext(p)) + for (i = 0, p = cabname; *p; p++) if (*p == '\\' || *p == '/') i = p - cabname + 1; From webhook-mailer at python.org Tue Jun 30 05:15:57 2020 From: webhook-mailer at python.org (Lawrence D'Anna) Date: Tue, 30 Jun 2020 09:15:57 -0000 Subject: [Python-checkins] bpo-41100: fix _decimal for arm64 Mac OS (GH-21228) Message-ID: https://github.com/python/cpython/commit/604d95e235d86465b8c17f02095edcaf18464d4c commit: 604d95e235d86465b8c17f02095edcaf18464d4c branch: master author: Lawrence D'Anna <64555057+lawrence-danna-apple at users.noreply.github.com> committer: GitHub date: 2020-06-30T11:15:46+02:00 summary: bpo-41100: fix _decimal for arm64 Mac OS (GH-21228) Patch by Lawrence Danna. files: A Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst M Modules/_decimal/libmpdec/mpdecimal.h diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst new file mode 100644 index 0000000000000..d6176d69f0eb0 --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-06-30-04-44-29.bpo-41100.PJwA6F.rst @@ -0,0 +1 @@ +add arm64 to the allowable Mac OS arches in mpdecimal.h \ No newline at end of file diff --git a/Modules/_decimal/libmpdec/mpdecimal.h b/Modules/_decimal/libmpdec/mpdecimal.h index 108b76efa8594..35ce429f60124 100644 --- a/Modules/_decimal/libmpdec/mpdecimal.h +++ b/Modules/_decimal/libmpdec/mpdecimal.h @@ -127,6 +127,9 @@ const char *mpd_version(void); #elif defined(__x86_64__) #define CONFIG_64 #define ASM + #elif defined(__arm64__) + #define CONFIG_64 + #define ANSI #else #error "unknown architecture for universal build." #endif From webhook-mailer at python.org Tue Jun 30 05:42:48 2020 From: webhook-mailer at python.org (E-Paine) Date: Tue, 30 Jun 2020 09:42:48 -0000 Subject: [Python-checkins] Update FAQ release schedule and estimated users (GH-21180) Message-ID: https://github.com/python/cpython/commit/3fa4799c3f9d9de7cac30e5db3627e9e125b9ce5 commit: 3fa4799c3f9d9de7cac30e5db3627e9e125b9ce5 branch: master author: E-Paine <63801254+E-Paine at users.noreply.github.com> committer: GitHub date: 2020-06-30T05:42:43-04:00 summary: Update FAQ release schedule and estimated users (GH-21180) Update FAQ to include: * The new yearly release schedule from PEP 602 * Estimated users from "tens of thousands" to "millions" files: M Doc/faq/general.rst diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 70837341b1b33..eee3c3c203efa 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -296,8 +296,8 @@ How stable is Python? --------------------- Very stable. New, stable releases have been coming out roughly every 6 to 18 -months since 1991, and this seems likely to continue. Currently there are -usually around 18 months between major releases. +months since 1991, and this seems likely to continue. As of version 3.9, +Python will have a major new release every 12 months (:pep:`602`). The developers issue "bugfix" releases of older versions, so the stability of existing releases gradually improves. Bugfix releases, indicated by a third @@ -315,8 +315,8 @@ be maintained after January 1, 2020 ` How many people are using Python? 
--------------------------------- -There are probably tens of thousands of users, though it's difficult to obtain -an exact count. +There are probably millions of users, though it's difficult to obtain an exact +count. Python is available for free download, so there are no sales figures, and it's available from many different sites and packaged with many Linux distributions, From webhook-mailer at python.org Tue Jun 30 05:51:25 2020 From: webhook-mailer at python.org (Miss Islington (bot)) Date: Tue, 30 Jun 2020 09:51:25 -0000 Subject: [Python-checkins] Update FAQ release schedule and estimated users (GH-21180) Message-ID: https://github.com/python/cpython/commit/c81f9e2d0a78d37209142471dad0fd433220f2ae commit: c81f9e2d0a78d37209142471dad0fd433220f2ae branch: 3.8 author: Miss Islington (bot) <31488909+miss-islington at users.noreply.github.com> committer: GitHub date: 2020-06-30T02:51:21-07:00 summary: Update FAQ release schedule and estimated users (GH-21180) Update FAQ to include: * The new yearly release schedule from PEP 602 * Estimated users from "tens of thousands" to "millions" (cherry picked from commit 3fa4799c3f9d9de7cac30e5db3627e9e125b9ce5) Co-authored-by: E-Paine <63801254+E-Paine at users.noreply.github.com> files: M Doc/faq/general.rst diff --git a/Doc/faq/general.rst b/Doc/faq/general.rst index 70837341b1b33..eee3c3c203efa 100644 --- a/Doc/faq/general.rst +++ b/Doc/faq/general.rst @@ -296,8 +296,8 @@ How stable is Python? --------------------- Very stable. New, stable releases have been coming out roughly every 6 to 18 -months since 1991, and this seems likely to continue. Currently there are -usually around 18 months between major releases. +months since 1991, and this seems likely to continue. As of version 3.9, +Python will have a major new release every 12 months (:pep:`602`). The developers issue "bugfix" releases of older versions, so the stability of existing releases gradually improves. Bugfix releases, indicated by a third @@ -315,8 +315,8 @@ be maintained after January 1, 2020 ` How many people are using Python? --------------------------------- -There are probably tens of thousands of users, though it's difficult to obtain -an exact count. +There are probably millions of users, though it's difficult to obtain an exact +count. 
Python is available for free download, so there are no sales figures, and it's available from many different sites and packaged with many Linux distributions, From webhook-mailer at python.org Tue Jun 30 09:46:13 2020 From: webhook-mailer at python.org (Hai Shi) Date: Tue, 30 Jun 2020 13:46:13 -0000 Subject: [Python-checkins] bpo-40275: Use new test.support helper submodules in tests (GH-21219) Message-ID: https://github.com/python/cpython/commit/3ddc634cd5469550c0c2dc5a6051a70739995699 commit: 3ddc634cd5469550c0c2dc5a6051a70739995699 branch: master author: Hai Shi committer: GitHub date: 2020-06-30T15:46:06+02:00 summary: bpo-40275: Use new test.support helper submodules in tests (GH-21219) files: M Lib/test/audiotests.py M Lib/test/libregrtest/cmdline.py M Lib/test/libregrtest/main.py M Lib/test/libregrtest/runtest_mp.py M Lib/test/test_aifc.py M Lib/test/test_bz2.py M Lib/test/test_functools.py M Lib/test/test_future.py M Lib/test/test_logging.py M Lib/test/test_poll.py M Lib/test/test_regrtest.py M Lib/test/test_smtpd.py M Lib/test/test_time.py M Lib/test/test_unicode_file.py M Lib/test/test_urllib.py M Lib/test/test_zipimport_support.py diff --git a/Lib/test/audiotests.py b/Lib/test/audiotests.py index d3e8e9ee44a13..9d6c4cc2b4b02 100644 --- a/Lib/test/audiotests.py +++ b/Lib/test/audiotests.py @@ -1,4 +1,5 @@ -from test.support import findfile, TESTFN, unlink +from test.support import findfile +from test.support.os_helper import TESTFN, unlink import array import io import pickle diff --git a/Lib/test/libregrtest/cmdline.py b/Lib/test/libregrtest/cmdline.py index c0bb051bd0783..a4bac79c8af68 100644 --- a/Lib/test/libregrtest/cmdline.py +++ b/Lib/test/libregrtest/cmdline.py @@ -2,6 +2,7 @@ import os import sys from test import support +from test.support import os_helper USAGE = """\ @@ -291,7 +292,7 @@ def _create_parser(): def relative_filename(string): # CWD is replaced with a temporary dir before calling main(), so we # join it with the saved CWD so it ends up where the user expects. 
- return os.path.join(support.SAVEDCWD, string) + return os.path.join(os_helper.SAVEDCWD, string) def huntrleaks(string): diff --git a/Lib/test/libregrtest/main.py b/Lib/test/libregrtest/main.py index 7675a97b5b48e..793c99a8f4ca3 100644 --- a/Lib/test/libregrtest/main.py +++ b/Lib/test/libregrtest/main.py @@ -216,7 +216,7 @@ def find_tests(self, tests): # regex to match 'test_builtin' in line: # '0:00:00 [ 4/400] test_builtin -- test_dict took 1 sec' regex = re.compile(r'\btest_[a-zA-Z0-9_]+\b') - with open(os.path.join(support.SAVEDCWD, self.ns.fromfile)) as fp: + with open(os.path.join(os_helper.SAVEDCWD, self.ns.fromfile)) as fp: for line in fp: line = line.split('#', 1)[0] line = line.strip() @@ -559,7 +559,7 @@ def save_xml_result(self): for k, v in totals.items(): root.set(k, str(v)) - xmlpath = os.path.join(support.SAVEDCWD, self.ns.xmlpath) + xmlpath = os.path.join(os_helper.SAVEDCWD, self.ns.xmlpath) with open(xmlpath, 'wb') as f: for s in ET.tostringlist(root): f.write(s) @@ -597,7 +597,7 @@ def create_temp_dir(self): test_cwd = 'test_python_worker_{}'.format(pid) else: test_cwd = 'test_python_{}'.format(pid) - test_cwd += support.FS_NONASCII + test_cwd += os_helper.FS_NONASCII test_cwd = os.path.join(self.tmp_dir, test_cwd) return test_cwd @@ -609,10 +609,10 @@ def cleanup(self): for name in glob.glob(path): if os.path.isdir(name): print("Remove directory: %s" % name) - support.rmtree(name) + os_helper.rmtree(name) else: print("Remove file: %s" % name) - support.unlink(name) + os_helper.unlink(name) def main(self, tests=None, **kwargs): self.parse_args(kwargs) @@ -629,7 +629,7 @@ def main(self, tests=None, **kwargs): # Run the tests in a context manager that temporarily changes the CWD # to a temporary and writable directory. If it's not possible to # create or change the CWD, the original CWD will be used. - # The original CWD is available from support.SAVEDCWD. + # The original CWD is available from os_helper.SAVEDCWD. with os_helper.temp_cwd(test_cwd, quiet=True): # When using multiprocessing, worker processes will use test_cwd # as their parent temporary directory. 
So when the main process diff --git a/Lib/test/libregrtest/runtest_mp.py b/Lib/test/libregrtest/runtest_mp.py index 7a18e45434bb4..3d503af23b6a7 100644 --- a/Lib/test/libregrtest/runtest_mp.py +++ b/Lib/test/libregrtest/runtest_mp.py @@ -11,6 +11,7 @@ import traceback import types from test import support +from test.support import os_helper from test.libregrtest.runtest import ( runtest, INTERRUPTED, CHILD_ERROR, PROGRESS_MIN_TIME, @@ -70,7 +71,7 @@ def run_test_in_subprocess(testname, ns): stderr=subprocess.PIPE, universal_newlines=True, close_fds=(os.name != 'nt'), - cwd=support.SAVEDCWD, + cwd=os_helper.SAVEDCWD, **kw) diff --git a/Lib/test/test_aifc.py b/Lib/test/test_aifc.py index 5a95099cc5cdf..fb6da4136f4c5 100644 --- a/Lib/test/test_aifc.py +++ b/Lib/test/test_aifc.py @@ -1,4 +1,6 @@ -from test.support import check_no_resource_warning, findfile, TESTFN, unlink +from test.support import findfile +from test.support.os_helper import TESTFN, unlink +from test.support.warnings_helper import check_no_resource_warning import unittest from unittest import mock from test import audiotests diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py index 8f0773d55faef..ccc5e4df83a7a 100644 --- a/Lib/test/test_bz2.py +++ b/Lib/test/test_bz2.py @@ -12,14 +12,15 @@ import shutil import subprocess import threading +from test.support import import_helper from test.support import threading_helper -from test.support import unlink +from test.support.os_helper import unlink import _compression import sys # Skip tests if the bz2 module doesn't exist. -bz2 = support.import_module('bz2') +bz2 = import_helper.import_module('bz2') from bz2 import BZ2File, BZ2Compressor, BZ2Decompressor has_cmdline_bunzip2 = None diff --git a/Lib/test/test_functools.py b/Lib/test/test_functools.py index e726188982bc4..edd5773e13d54 100644 --- a/Lib/test/test_functools.py +++ b/Lib/test/test_functools.py @@ -19,15 +19,18 @@ from weakref import proxy import contextlib +from test.support import import_helper from test.support import threading_helper from test.support.script_helper import assert_python_ok import functools -py_functools = support.import_fresh_module('functools', blocked=['_functools']) -c_functools = support.import_fresh_module('functools', fresh=['_functools']) +py_functools = import_helper.import_fresh_module('functools', + blocked=['_functools']) +c_functools = import_helper.import_fresh_module('functools', + fresh=['_functools']) -decimal = support.import_fresh_module('decimal', fresh=['_decimal']) +decimal = import_helper.import_fresh_module('decimal', fresh=['_decimal']) @contextlib.contextmanager def replaced_module(name, replacement): diff --git a/Lib/test/test_future.py b/Lib/test/test_future.py index 0f40357b3a731..e4715587d21cf 100644 --- a/Lib/test/test_future.py +++ b/Lib/test/test_future.py @@ -4,6 +4,7 @@ import ast import unittest from test import support +from test.support import import_helper from textwrap import dedent import os import re @@ -24,17 +25,17 @@ def check_syntax_error(self, err, basename, lineno, offset=1): self.assertEqual(err.offset, offset) def test_future1(self): - with support.CleanImport('future_test1'): + with import_helper.CleanImport('future_test1'): from test import future_test1 self.assertEqual(future_test1.result, 6) def test_future2(self): - with support.CleanImport('future_test2'): + with import_helper.CleanImport('future_test2'): from test import future_test2 self.assertEqual(future_test2.result, 6) def test_future3(self): - with support.CleanImport('test_future3'): + 
with import_helper.CleanImport('test_future3'): from test import test_future3 def test_badfuture3(self): @@ -113,7 +114,7 @@ def test_parserhack(self): self.fail("syntax error didn't occur") def test_multiple_features(self): - with support.CleanImport("test.test_future5"): + with import_helper.CleanImport("test.test_future5"): from test import test_future5 def test_unicode_literals_exec(self): diff --git a/Lib/test/test_logging.py b/Lib/test/test_logging.py index 2ae00b6e3b4e9..eb5b926908a19 100644 --- a/Lib/test/test_logging.py +++ b/Lib/test/test_logging.py @@ -42,8 +42,10 @@ import tempfile from test.support.script_helper import assert_python_ok, assert_python_failure from test import support +from test.support import os_helper from test.support import socket_helper from test.support import threading_helper +from test.support import warnings_helper from test.support.logging_helper import TestHandler import textwrap import threading @@ -1169,7 +1171,7 @@ class ConfigFileTest(BaseTest): """Reading logging config from a .ini-style config file.""" - check_no_resource_warning = support.check_no_resource_warning + check_no_resource_warning = warnings_helper.check_no_resource_warning expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. @@ -1756,7 +1758,7 @@ def setUp(self): def tearDown(self): SocketHandlerTest.tearDown(self) - support.unlink(self.address) + os_helper.unlink(self.address) class DatagramHandlerTest(BaseTest): @@ -1837,7 +1839,7 @@ def setUp(self): def tearDown(self): DatagramHandlerTest.tearDown(self) - support.unlink(self.address) + os_helper.unlink(self.address) class SysLogHandlerTest(BaseTest): @@ -1921,7 +1923,7 @@ def setUp(self): def tearDown(self): SysLogHandlerTest.tearDown(self) - support.unlink(self.address) + os_helper.unlink(self.address) @unittest.skipUnless(socket_helper.IPV6_ENABLED, 'IPv6 support required for this test.') @@ -2175,7 +2177,7 @@ class ConfigDictTest(BaseTest): """Reading logging config from a dictionary.""" - check_no_resource_warning = support.check_no_resource_warning + check_no_resource_warning = warnings_helper.check_no_resource_warning expected_log_pat = r"^(\w+) \+\+ (\w+)$" # config0 is a standard configuration. 
diff --git a/Lib/test/test_poll.py b/Lib/test/test_poll.py index a14c69a5723a2..de62350696a92 100644 --- a/Lib/test/test_poll.py +++ b/Lib/test/test_poll.py @@ -7,8 +7,10 @@ import threading import time import unittest -from test.support import TESTFN, run_unittest, cpython_only +from test.support import run_unittest, cpython_only from test.support import threading_helper +from test.support.os_helper import TESTFN + try: select.poll diff --git a/Lib/test/test_regrtest.py b/Lib/test/test_regrtest.py index 6745be6fea1ac..39af0d96d1e54 100644 --- a/Lib/test/test_regrtest.py +++ b/Lib/test/test_regrtest.py @@ -18,6 +18,7 @@ import unittest from test import libregrtest from test import support +from test.support import os_helper from test.libregrtest import utils @@ -161,12 +162,12 @@ def test_ignore(self): self.assertEqual(ns.ignore_tests, ['pattern']) self.checkError([opt], 'expected one argument') - self.addCleanup(support.unlink, support.TESTFN) - with open(support.TESTFN, "w") as fp: + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + with open(os_helper.TESTFN, "w") as fp: print('matchfile1', file=fp) print('matchfile2', file=fp) - filename = os.path.abspath(support.TESTFN) + filename = os.path.abspath(os_helper.TESTFN) ns = libregrtest._parse_args(['-m', 'match', '--ignorefile', filename]) self.assertEqual(ns.ignore_tests, @@ -183,12 +184,12 @@ def test_match(self): '-m', 'pattern2']) self.assertEqual(ns.match_tests, ['pattern1', 'pattern2']) - self.addCleanup(support.unlink, support.TESTFN) - with open(support.TESTFN, "w") as fp: + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + with open(os_helper.TESTFN, "w") as fp: print('matchfile1', file=fp) print('matchfile2', file=fp) - filename = os.path.abspath(support.TESTFN) + filename = os.path.abspath(os_helper.TESTFN) ns = libregrtest._parse_args(['-m', 'match', '--matchfile', filename]) self.assertEqual(ns.match_tests, @@ -237,7 +238,7 @@ def test_memlimit(self): def test_testdir(self): ns = libregrtest._parse_args(['--testdir', 'foo']) - self.assertEqual(ns.testdir, os.path.join(support.SAVEDCWD, 'foo')) + self.assertEqual(ns.testdir, os.path.join(os_helper.SAVEDCWD, 'foo')) self.checkError(['--testdir'], 'expected one argument') def test_runleaks(self): @@ -284,7 +285,7 @@ def test_coverdir(self): with self.subTest(opt=opt): ns = libregrtest._parse_args([opt, 'foo']) self.assertEqual(ns.coverdir, - os.path.join(support.SAVEDCWD, 'foo')) + os.path.join(os_helper.SAVEDCWD, 'foo')) self.checkError([opt], 'expected one argument') def test_nocoverdir(self): @@ -363,7 +364,7 @@ def setUp(self): self.testdir = os.path.realpath(os.path.dirname(__file__)) self.tmptestdir = tempfile.mkdtemp() - self.addCleanup(support.rmtree, self.tmptestdir) + self.addCleanup(os_helper.rmtree, self.tmptestdir) def create_test(self, name=None, code=None): if not name: @@ -384,7 +385,7 @@ def test_empty_test(self): name = self.TESTNAME_PREFIX + name path = os.path.join(self.tmptestdir, name + '.py') - self.addCleanup(support.unlink, path) + self.addCleanup(os_helper.unlink, path) # Use 'x' mode to ensure that we do not override existing tests try: with open(path, 'x', encoding='utf-8') as fp: @@ -770,8 +771,8 @@ def test_fromfile(self): # Write the list of files using a format similar to regrtest output: # [1/2] test_1 # [2/2] test_2 - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) # test format '0:00:00 [2/7] test_opcodes -- test_grammar took 0 sec' with 
open(filename, "w") as fp: @@ -886,7 +887,7 @@ def check_leak(self, code, what): test = self.create_test('huntrleaks', code=code) filename = 'reflog.txt' - self.addCleanup(support.unlink, filename) + self.addCleanup(os_helper.unlink, filename) output = self.run_tests('--huntrleaks', '3:3:', test, exitcode=2, stderr=subprocess.STDOUT) @@ -997,8 +998,8 @@ def test_method4(self): testname = self.create_test(code=code) # only run a subset - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) subset = [ # only ignore the method name @@ -1038,8 +1039,8 @@ def test_method4(self): self.assertEqual(methods, all_methods) # only run a subset - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) subset = [ # only match the method name diff --git a/Lib/test/test_smtpd.py b/Lib/test/test_smtpd.py index 3be7739743940..d5d5abfcf370a 100644 --- a/Lib/test/test_smtpd.py +++ b/Lib/test/test_smtpd.py @@ -2,6 +2,7 @@ import textwrap from test import support, mock_socket from test.support import socket_helper +from test.support import warnings_helper import socket import io import smtpd @@ -714,49 +715,49 @@ def test_unknown_command(self): b'recognized\r\n') def test_attribute_deprecations(self): - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__server - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__server = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__line - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__line = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__state - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__state = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__greeting - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__greeting = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__mailfrom - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__mailfrom = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__rcpttos - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__rcpttos = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__data - with support.check_warnings(('', 
DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__data = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__fqdn - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__fqdn = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__peer - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__peer = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__conn - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__conn = 'spam' - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): spam = self.channel._SMTPChannel__addr - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): self.channel._SMTPChannel__addr = 'spam' @unittest.skipUnless(socket_helper.IPV6_ENABLED, "IPv6 not enabled") diff --git a/Lib/test/test_time.py b/Lib/test/test_time.py index 80e43fafad813..6ced0470d0756 100644 --- a/Lib/test/test_time.py +++ b/Lib/test/test_time.py @@ -1,4 +1,5 @@ from test import support +from test.support import warnings_helper import decimal import enum import locale @@ -247,7 +248,7 @@ def test_default_values_for_zero(self): # not change output based on its value and no test for year # because systems vary in their support for year 0. 
expected = "2000 01 01 00 00 00 1 001" - with support.check_warnings(): + with warnings_helper.check_warnings(): result = time.strftime("%Y %m %d %H %M %S %w %j", (2000,)+(0,)*8) self.assertEqual(expected, result) diff --git a/Lib/test/test_unicode_file.py b/Lib/test/test_unicode_file.py index 46a0d062540b7..e397949187983 100644 --- a/Lib/test/test_unicode_file.py +++ b/Lib/test/test_unicode_file.py @@ -6,8 +6,10 @@ import unicodedata import unittest -from test.support import (run_unittest, rmtree, change_cwd, - TESTFN_UNICODE, TESTFN_UNENCODABLE, create_empty_file) +from test.support import run_unittest +from test.support.os_helper import (rmtree, change_cwd, TESTFN_UNICODE, + TESTFN_UNENCODABLE, create_empty_file) + if not os.path.supports_unicode_filenames: try: diff --git a/Lib/test/test_urllib.py b/Lib/test/test_urllib.py index 68bb49efb2810..f41fa2a950686 100644 --- a/Lib/test/test_urllib.py +++ b/Lib/test/test_urllib.py @@ -9,6 +9,8 @@ import unittest from unittest.mock import patch from test import support +from test.support import os_helper +from test.support import warnings_helper import os try: import ssl @@ -50,7 +52,7 @@ def urlopen(url, data=None, proxies=None): def FancyURLopener(): - with support.check_warnings( + with warnings_helper.check_warnings( ('FancyURLopener style of invoking requests is deprecated.', DeprecationWarning)): return urllib.request.FancyURLopener() @@ -145,19 +147,19 @@ def setUp(self): # Create a temp file to use for testing self.text = bytes("test_urllib: %s\n" % self.__class__.__name__, "ascii") - f = open(support.TESTFN, 'wb') + f = open(os_helper.TESTFN, 'wb') try: f.write(self.text) finally: f.close() - self.pathname = support.TESTFN + self.pathname = os_helper.TESTFN self.quoted_pathname = urllib.parse.quote(self.pathname) self.returned_obj = urlopen("file:%s" % self.quoted_pathname) def tearDown(self): """Shut down the open object""" self.returned_obj.close() - os.remove(support.TESTFN) + os.remove(os_helper.TESTFN) def test_interface(self): # Make sure object returned by urlopen() has the specified methods @@ -230,7 +232,7 @@ class ProxyTests(unittest.TestCase): def setUp(self): # Records changes to env vars - self.env = support.EnvironmentVarGuard() + self.env = os_helper.EnvironmentVarGuard() # Delete all proxy related env vars for k in list(os.environ): if 'proxy' in k.lower(): @@ -592,13 +594,13 @@ def test_userpass_inurl_w_spaces(self): self.unfakehttp() def test_URLopener_deprecation(self): - with support.check_warnings(('',DeprecationWarning)): + with warnings_helper.check_warnings(('',DeprecationWarning)): urllib.request.URLopener() @unittest.skipUnless(ssl, "ssl module required") def test_cafile_and_context(self): context = ssl.create_default_context() - with support.check_warnings(('', DeprecationWarning)): + with warnings_helper.check_warnings(('', DeprecationWarning)): with self.assertRaises(ValueError): urllib.request.urlopen( "https://localhost", cafile="/nonexistent/path", context=context @@ -699,10 +701,10 @@ def setUp(self): self.tempFiles = [] # Create a temporary file. - self.registerFileForCleanUp(support.TESTFN) + self.registerFileForCleanUp(os_helper.TESTFN) self.text = b'testing urllib.urlretrieve' try: - FILE = open(support.TESTFN, 'wb') + FILE = open(os_helper.TESTFN, 'wb') FILE.write(self.text) FILE.close() finally: @@ -745,18 +747,18 @@ def registerFileForCleanUp(self, fileName): def test_basic(self): # Make sure that a local file just gets its own location returned and # a headers value is returned. 
- result = urllib.request.urlretrieve("file:%s" % support.TESTFN) - self.assertEqual(result[0], support.TESTFN) + result = urllib.request.urlretrieve("file:%s" % os_helper.TESTFN) + self.assertEqual(result[0], os_helper.TESTFN) self.assertIsInstance(result[1], email.message.Message, "did not get an email.message.Message instance " "as second returned value") def test_copy(self): # Test that setting the filename argument works. - second_temp = "%s.2" % support.TESTFN + second_temp = "%s.2" % os_helper.TESTFN self.registerFileForCleanUp(second_temp) result = urllib.request.urlretrieve(self.constructLocalFileUrl( - support.TESTFN), second_temp) + os_helper.TESTFN), second_temp) self.assertEqual(second_temp, result[0]) self.assertTrue(os.path.exists(second_temp), "copy of the file was not " "made") @@ -777,10 +779,10 @@ def hooktester(block_count, block_read_size, file_size, count_holder=[0]): self.assertIsInstance(file_size, int) self.assertEqual(block_count, count_holder[0]) count_holder[0] = count_holder[0] + 1 - second_temp = "%s.2" % support.TESTFN + second_temp = "%s.2" % os_helper.TESTFN self.registerFileForCleanUp(second_temp) urllib.request.urlretrieve( - self.constructLocalFileUrl(support.TESTFN), + self.constructLocalFileUrl(os_helper.TESTFN), second_temp, hooktester) def test_reporthook_0_bytes(self): @@ -790,7 +792,7 @@ def hooktester(block_count, block_read_size, file_size, _report=report): _report.append((block_count, block_read_size, file_size)) srcFileName = self.createNewTempFile() urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName), - support.TESTFN, hooktester) + os_helper.TESTFN, hooktester) self.assertEqual(len(report), 1) self.assertEqual(report[0][2], 0) @@ -803,7 +805,7 @@ def hooktester(block_count, block_read_size, file_size, _report=report): _report.append((block_count, block_read_size, file_size)) srcFileName = self.createNewTempFile(b"x" * 5) urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName), - support.TESTFN, hooktester) + os_helper.TESTFN, hooktester) self.assertEqual(len(report), 2) self.assertEqual(report[0][2], 5) self.assertEqual(report[1][2], 5) @@ -817,7 +819,7 @@ def hooktester(block_count, block_read_size, file_size, _report=report): _report.append((block_count, block_read_size, file_size)) srcFileName = self.createNewTempFile(b"x" * 8193) urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName), - support.TESTFN, hooktester) + os_helper.TESTFN, hooktester) self.assertEqual(len(report), 3) self.assertEqual(report[0][2], 8193) self.assertEqual(report[0][1], 8192) @@ -1556,7 +1558,7 @@ def test_quoted_open(self): class DummyURLopener(urllib.request.URLopener): def open_spam(self, url): return url - with support.check_warnings( + with warnings_helper.check_warnings( ('DummyURLopener style of invoking requests is deprecated.', DeprecationWarning)): self.assertEqual(DummyURLopener().open( @@ -1567,9 +1569,9 @@ def open_spam(self, url): "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"), "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/") - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_urlopener_retrieve_file(self): - with support.temp_dir() as tmpdir: + with os_helper.temp_dir() as tmpdir: fd, tmpfile = tempfile.mkstemp(dir=tmpdir) os.close(fd) fileurl = "file:" + urllib.request.pathname2url(tmpfile) @@ -1577,7 +1579,7 @@ def test_urlopener_retrieve_file(self): # Some buildbots have TEMP folder that uses a lowercase drive letter. 
self.assertEqual(os.path.normcase(filename), os.path.normcase(tmpfile)) - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_urlopener_retrieve_remote(self): url = "http://www.python.org/file.txt" self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!") @@ -1585,7 +1587,7 @@ def test_urlopener_retrieve_remote(self): filename, _ = urllib.request.URLopener().retrieve(url) self.assertEqual(os.path.splitext(filename)[1], ".txt") - @support.ignore_warnings(category=DeprecationWarning) + @warnings_helper.ignore_warnings(category=DeprecationWarning) def test_local_file_open(self): # bpo-35907, CVE-2019-9948: urllib must reject local_file:// scheme class DummyURLopener(urllib.request.URLopener): diff --git a/Lib/test/test_zipimport_support.py b/Lib/test/test_zipimport_support.py index 88561017503ff..7bf50a33728e5 100644 --- a/Lib/test/test_zipimport_support.py +++ b/Lib/test/test_zipimport_support.py @@ -13,6 +13,7 @@ import inspect import linecache import unittest +from test.support import os_helper from test.support.script_helper import (spawn_python, kill_python, assert_python_ok, make_script, make_zip_script) @@ -77,7 +78,7 @@ def tearDown(self): def test_inspect_getsource_issue4223(self): test_src = "def foo(): pass\n" - with test.support.temp_dir() as d: + with os_helper.temp_dir() as d: init_name = make_script(d, '__init__', test_src) name_in_zip = os.path.join('zip_pkg', os.path.basename(init_name)) @@ -117,7 +118,7 @@ def test_doctest_issue4197(self): mod_name = mod_name.replace("sample_", "sample_zipped_") sample_sources[mod_name] = src - with test.support.temp_dir() as d: + with os_helper.temp_dir() as d: script_name = make_script(d, 'test_zipped_doctest', test_src) zip_name, run_name = make_zip_script(d, 'test_zip', @@ -192,7 +193,7 @@ class Test: doctest.testmod() """) pattern = 'File "%s", line 2, in %s' - with test.support.temp_dir() as d: + with os_helper.temp_dir() as d: script_name = make_script(d, 'script', test_src) rc, out, err = assert_python_ok(script_name) expected = pattern % (script_name, "__main__.Test") @@ -219,7 +220,7 @@ def f(): import pdb pdb.Pdb(nosigint=True).runcall(f) """) - with test.support.temp_dir() as d: + with os_helper.temp_dir() as d: script_name = make_script(d, 'script', test_src) p = spawn_python(script_name) p.stdin.write(b'l\n') From webhook-mailer at python.org Tue Jun 30 09:46:36 2020 From: webhook-mailer at python.org (Hai Shi) Date: Tue, 30 Jun 2020 13:46:36 -0000 Subject: [Python-checkins] bpo-40275: Use new test.support helper submodules in tests (GH-21169) Message-ID: https://github.com/python/cpython/commit/0c4f0f3b29d84063700217dcf90ad6860ed71c70 commit: 0c4f0f3b29d84063700217dcf90ad6860ed71c70 branch: master author: Hai Shi committer: GitHub date: 2020-06-30T15:46:31+02:00 summary: bpo-40275: Use new test.support helper submodules in tests (GH-21169) files: M Lib/test/_test_multiprocessing.py M Lib/test/test_base64.py M Lib/test/test_bool.py M Lib/test/test_dict_version.py M Lib/test/test_grammar.py M Lib/test/test_iter.py M Lib/test/test_lzma.py M Lib/test/test_mailcap.py M Lib/test/test_memoryview.py M Lib/test/test_os.py M Lib/test/test_shutil.py M Lib/test/test_subprocess.py M Lib/test/test_support.py M Lib/test/test_tempfile.py diff --git a/Lib/test/_test_multiprocessing.py b/Lib/test/_test_multiprocessing.py index 5f65d966d62ee..6b4679f82da73 100644 --- a/Lib/test/_test_multiprocessing.py +++ b/Lib/test/_test_multiprocessing.py @@ -27,12 +27,13 @@ import 
test.support.script_helper from test import support from test.support import hashlib_helper +from test.support import import_helper from test.support import socket_helper from test.support import threading_helper # Skip tests if _multiprocessing wasn't built. -_multiprocessing = test.support.import_module('_multiprocessing') +_multiprocessing = import_helper.import_module('_multiprocessing') # Skip tests if sem_open implementation is broken. support.skip_if_broken_multiprocessing_synchronize() import threading diff --git a/Lib/test/test_base64.py b/Lib/test/test_base64.py index 1dbeac41dc0dd..1f67e46cd2267 100644 --- a/Lib/test/test_base64.py +++ b/Lib/test/test_base64.py @@ -1,9 +1,9 @@ import unittest -from test import support import base64 import binascii import os from array import array +from test.support import os_helper from test.support import script_helper @@ -647,8 +647,8 @@ def test_ErrorHeritage(self): class TestMain(unittest.TestCase): def tearDown(self): - if os.path.exists(support.TESTFN): - os.unlink(support.TESTFN) + if os.path.exists(os_helper.TESTFN): + os.unlink(os_helper.TESTFN) def get_output(self, *args): return script_helper.assert_python_ok('-m', 'base64', *args).out @@ -662,9 +662,9 @@ def test_encode_decode(self): )) def test_encode_file(self): - with open(support.TESTFN, 'wb') as fp: + with open(os_helper.TESTFN, 'wb') as fp: fp.write(b'a\xffb\n') - output = self.get_output('-e', support.TESTFN) + output = self.get_output('-e', os_helper.TESTFN) self.assertEqual(output.rstrip(), b'Yf9iCg==') def test_encode_from_stdin(self): @@ -674,9 +674,9 @@ def test_encode_from_stdin(self): self.assertIsNone(err) def test_decode(self): - with open(support.TESTFN, 'wb') as fp: + with open(os_helper.TESTFN, 'wb') as fp: fp.write(b'Yf9iCg==') - output = self.get_output('-d', support.TESTFN) + output = self.get_output('-d', os_helper.TESTFN) self.assertEqual(output.rstrip(), b'a\xffb') if __name__ == '__main__': diff --git a/Lib/test/test_bool.py b/Lib/test/test_bool.py index 4c6fba42c0c57..7b3a3859e0893 100644 --- a/Lib/test/test_bool.py +++ b/Lib/test/test_bool.py @@ -2,6 +2,7 @@ import unittest from test import support +from test.support import os_helper import os @@ -234,11 +235,11 @@ def test_boolean(self): def test_fileclosed(self): try: - with open(support.TESTFN, "w") as f: + with open(os_helper.TESTFN, "w") as f: self.assertIs(f.closed, False) self.assertIs(f.closed, True) finally: - os.remove(support.TESTFN) + os.remove(os_helper.TESTFN) def test_types(self): # types are always true. diff --git a/Lib/test/test_dict_version.py b/Lib/test/test_dict_version.py index b23786514f82e..8cdccad0d79ab 100644 --- a/Lib/test/test_dict_version.py +++ b/Lib/test/test_dict_version.py @@ -2,11 +2,11 @@ Test implementation of the PEP 509: dictionary versionning. """ import unittest -from test import support +from test.support import import_helper # PEP 509 is implemented in CPython but other Python implementations # don't require to implement it -_testcapi = support.import_module('_testcapi') +_testcapi = import_helper.import_module('_testcapi') class DictVersionTests(unittest.TestCase): diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py index ef7d1a15c7570..a51452e739f3a 100644 --- a/Lib/test/test_grammar.py +++ b/Lib/test/test_grammar.py @@ -1,7 +1,8 @@ # Python test set -- part 1, grammar. # This just tests whether the parser accepts them all. 
-from test.support import check_syntax_error, check_syntax_warning +from test.support import check_syntax_error +from test.support.warnings_helper import check_syntax_warning import inspect import unittest import sys @@ -276,7 +277,8 @@ def __getitem__(self, item): class GrammarTests(unittest.TestCase): - from test.support import check_syntax_error, check_syntax_warning + from test.support import check_syntax_error + from test.support.warnings_helper import check_syntax_warning # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE # XXX can't test in a script -- this rule is only used when interactive diff --git a/Lib/test/test_iter.py b/Lib/test/test_iter.py index 524346939886d..a7658b5f47694 100644 --- a/Lib/test/test_iter.py +++ b/Lib/test/test_iter.py @@ -2,7 +2,8 @@ import sys import unittest -from test.support import run_unittest, TESTFN, unlink, cpython_only +from test.support import run_unittest, cpython_only +from test.support.os_helper import TESTFN, unlink from test.support import check_free_after_iterating, ALWAYS_EQ, NEVER_EQ import pickle import collections.abc diff --git a/Lib/test/test_lzma.py b/Lib/test/test_lzma.py index 0f3af27efa909..c2427f8cd228f 100644 --- a/Lib/test/test_lzma.py +++ b/Lib/test/test_lzma.py @@ -9,7 +9,11 @@ import unittest from test.support import ( - _4G, TESTFN, import_module, bigmemtest, run_unittest, unlink + _4G, bigmemtest, run_unittest +) +from test.support.import_helper import import_module +from test.support.os_helper import ( + TESTFN, unlink ) lzma = import_module("lzma") diff --git a/Lib/test/test_mailcap.py b/Lib/test/test_mailcap.py index c08423c670739..51a0c7da8bb6b 100644 --- a/Lib/test/test_mailcap.py +++ b/Lib/test/test_mailcap.py @@ -2,6 +2,7 @@ import os import copy import test.support +from test.support import os_helper import unittest # Location of mailcap file @@ -74,7 +75,7 @@ def test_listmailcapfiles(self): self.assertIsInstance(mcfiles, list) for m in mcfiles: self.assertIsInstance(m, str) - with test.support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: # According to RFC 1524, if MAILCAPS env variable exists, use that # and only that. if "MAILCAPS" in env: @@ -136,7 +137,7 @@ def test_mock_getcaps(self): # Test mailcap.getcaps() using mock mailcap file in this dir. # Temporarily override any existing system mailcap file by pointing the # MAILCAPS environment variable to our mock file. - with test.support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env["MAILCAPS"] = MAILCAPFILE caps = mailcap.getcaps() self.assertDictEqual(caps, MAILCAPDICT) diff --git a/Lib/test/test_memoryview.py b/Lib/test/test_memoryview.py index ca307d8342f31..d7e3f0c0effa6 100644 --- a/Lib/test/test_memoryview.py +++ b/Lib/test/test_memoryview.py @@ -14,6 +14,8 @@ import copy import pickle +from test.support import import_helper + class AbstractMemoryTests: source_bytes = b"abcdef" @@ -508,7 +510,7 @@ class ArrayMemorySliceSliceTest(unittest.TestCase, class OtherTest(unittest.TestCase): def test_ctypes_cast(self): # Issue 15944: Allow all source formats when casting to bytes. 
- ctypes = test.support.import_module("ctypes") + ctypes = import_helper.import_module("ctypes") p6 = bytes(ctypes.c_double(0.6)) d = ctypes.c_double() diff --git a/Lib/test/test_os.py b/Lib/test/test_os.py index ef2395d87a605..03152072c1bf5 100644 --- a/Lib/test/test_os.py +++ b/Lib/test/test_os.py @@ -30,8 +30,10 @@ import uuid import warnings from test import support +from test.support import os_helper from test.support import socket_helper from test.support import threading_helper +from test.support import warnings_helper from platform import win32_is_iot try: @@ -57,7 +59,8 @@ INT_MAX = PY_SSIZE_T_MAX = sys.maxsize from test.support.script_helper import assert_python_ok -from test.support import unix_shell, FakePath +from test.support import unix_shell +from test.support.os_helper import FakePath root_in_posix = False @@ -109,7 +112,7 @@ def test_getcwd_long_path(self): dirname = dirname + ('a' * (dirlen - len(dirname))) with tempfile.TemporaryDirectory() as tmpdir: - with support.change_cwd(tmpdir) as path: + with os_helper.change_cwd(tmpdir) as path: expected = path while True: @@ -153,17 +156,17 @@ def test_getcwdb(self): # Tests creating TESTFN class FileTests(unittest.TestCase): def setUp(self): - if os.path.lexists(support.TESTFN): - os.unlink(support.TESTFN) + if os.path.lexists(os_helper.TESTFN): + os.unlink(os_helper.TESTFN) tearDown = setUp def test_access(self): - f = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR) + f = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR) os.close(f) - self.assertTrue(os.access(support.TESTFN, os.W_OK)) + self.assertTrue(os.access(os_helper.TESTFN, os.W_OK)) def test_closerange(self): - first = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR) + first = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR) # We must allocate two consecutive file descriptors, otherwise # it will mess up other file descriptors (perhaps even the three # standard ones). 
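Several hunks above (_test_multiprocessing, test_dict_version, test_memoryview) switch the optional-module skip idiom from test.support.import_module to the new import_helper submodule. Roughly, in a hypothetical test module assuming the standard Lib/test environment:

    import unittest
    from test.support import import_helper

    # Raises unittest.SkipTest (not ImportError) when the module is missing,
    # so the whole test file is skipped cleanly at import time.
    _testcapi = import_helper.import_module('_testcapi')

    class ImportHelperExample(unittest.TestCase):
        def test_ctypes_cast(self):
            # Skip only this test if ctypes is unavailable.
            ctypes = import_helper.import_module('ctypes')
            self.assertEqual(len(bytes(ctypes.c_double(0.6))), 8)

    if __name__ == '__main__':
        unittest.main()
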
@@ -185,14 +188,14 @@ def test_closerange(self): @support.cpython_only def test_rename(self): - path = support.TESTFN + path = os_helper.TESTFN old = sys.getrefcount(path) self.assertRaises(TypeError, os.rename, path, 0) new = sys.getrefcount(path) self.assertEqual(old, new) def test_read(self): - with open(support.TESTFN, "w+b") as fobj: + with open(os_helper.TESTFN, "w+b") as fobj: fobj.write(b"spam") fobj.flush() fd = fobj.fileno() @@ -208,12 +211,12 @@ def test_read(self): "needs INT_MAX < PY_SSIZE_T_MAX") @support.bigmemtest(size=INT_MAX + 10, memuse=1, dry_run=False) def test_large_read(self, size): - self.addCleanup(support.unlink, support.TESTFN) - create_file(support.TESTFN, b'test') + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + create_file(os_helper.TESTFN, b'test') # Issue #21932: Make sure that os.read() does not raise an # OverflowError for size larger than INT_MAX - with open(support.TESTFN, "rb") as fp: + with open(os_helper.TESTFN, "rb") as fp: data = os.read(fp.fileno(), size) # The test does not try to read more than 2 GiB at once because the @@ -222,13 +225,13 @@ def test_large_read(self, size): def test_write(self): # os.write() accepts bytes- and buffer-like objects but not strings - fd = os.open(support.TESTFN, os.O_CREAT | os.O_WRONLY) + fd = os.open(os_helper.TESTFN, os.O_CREAT | os.O_WRONLY) self.assertRaises(TypeError, os.write, fd, "beans") os.write(fd, b"bacon\n") os.write(fd, bytearray(b"eggs\n")) os.write(fd, memoryview(b"spam\n")) os.close(fd) - with open(support.TESTFN, "rb") as fobj: + with open(os_helper.TESTFN, "rb") as fobj: self.assertEqual(fobj.read().splitlines(), [b"bacon", b"eggs", b"spam"]) @@ -252,12 +255,12 @@ def test_write_windows_console(self): self.write_windows_console(sys.executable, "-u", "-c", code) def fdopen_helper(self, *args): - fd = os.open(support.TESTFN, os.O_RDONLY) + fd = os.open(os_helper.TESTFN, os.O_RDONLY) f = os.fdopen(fd, *args) f.close() def test_fdopen(self): - fd = os.open(support.TESTFN, os.O_CREAT|os.O_RDWR) + fd = os.open(os_helper.TESTFN, os.O_CREAT|os.O_RDWR) os.close(fd) self.fdopen_helper() @@ -265,15 +268,15 @@ def test_fdopen(self): self.fdopen_helper('r', 100) def test_replace(self): - TESTFN2 = support.TESTFN + ".2" - self.addCleanup(support.unlink, support.TESTFN) - self.addCleanup(support.unlink, TESTFN2) + TESTFN2 = os_helper.TESTFN + ".2" + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + self.addCleanup(os_helper.unlink, TESTFN2) - create_file(support.TESTFN, b"1") + create_file(os_helper.TESTFN, b"1") create_file(TESTFN2, b"2") - os.replace(support.TESTFN, TESTFN2) - self.assertRaises(FileNotFoundError, os.stat, support.TESTFN) + os.replace(os_helper.TESTFN, TESTFN2) + self.assertRaises(FileNotFoundError, os.stat, os_helper.TESTFN) with open(TESTFN2, 'r') as f: self.assertEqual(f.read(), "1") @@ -285,7 +288,7 @@ def test_open_keywords(self): def test_symlink_keywords(self): symlink = support.get_attribute(os, "symlink") try: - symlink(src='target', dst=support.TESTFN, + symlink(src='target', dst=os_helper.TESTFN, target_is_directory=False, dir_fd=None) except (NotImplementedError, OSError): pass # No OS support or unprivileged user @@ -297,18 +300,18 @@ def test_copy_file_range_invalid_values(self): @unittest.skipUnless(hasattr(os, 'copy_file_range'), 'test needs os.copy_file_range()') def test_copy_file_range(self): - TESTFN2 = support.TESTFN + ".3" + TESTFN2 = os_helper.TESTFN + ".3" data = b'0123456789' - create_file(support.TESTFN, data) - self.addCleanup(support.unlink, 
support.TESTFN) + create_file(os_helper.TESTFN, data) + self.addCleanup(os_helper.unlink, os_helper.TESTFN) - in_file = open(support.TESTFN, 'rb') + in_file = open(os_helper.TESTFN, 'rb') self.addCleanup(in_file.close) in_fd = in_file.fileno() out_file = open(TESTFN2, 'w+b') - self.addCleanup(support.unlink, TESTFN2) + self.addCleanup(os_helper.unlink, TESTFN2) self.addCleanup(out_file.close) out_fd = out_file.fileno() @@ -331,21 +334,21 @@ def test_copy_file_range(self): @unittest.skipUnless(hasattr(os, 'copy_file_range'), 'test needs os.copy_file_range()') def test_copy_file_range_offset(self): - TESTFN4 = support.TESTFN + ".4" + TESTFN4 = os_helper.TESTFN + ".4" data = b'0123456789' bytes_to_copy = 6 in_skip = 3 out_seek = 5 - create_file(support.TESTFN, data) - self.addCleanup(support.unlink, support.TESTFN) + create_file(os_helper.TESTFN, data) + self.addCleanup(os_helper.unlink, os_helper.TESTFN) - in_file = open(support.TESTFN, 'rb') + in_file = open(os_helper.TESTFN, 'rb') self.addCleanup(in_file.close) in_fd = in_file.fileno() out_file = open(TESTFN4, 'w+b') - self.addCleanup(support.unlink, TESTFN4) + self.addCleanup(os_helper.unlink, TESTFN4) self.addCleanup(out_file.close) out_fd = out_file.fileno() @@ -377,8 +380,8 @@ def test_copy_file_range_offset(self): # Test attributes on return values from os.*stat* family. class StatAttributeTests(unittest.TestCase): def setUp(self): - self.fname = support.TESTFN - self.addCleanup(support.unlink, self.fname) + self.fname = os_helper.TESTFN + self.addCleanup(os_helper.unlink, self.fname) create_file(self.fname, b"ABC") def check_stat_attributes(self, fname): @@ -563,7 +566,7 @@ def test_file_attributes(self): 0) # test directory st_file_attributes (FILE_ATTRIBUTE_DIRECTORY set) - dirname = support.TESTFN + "dir" + dirname = os_helper.TESTFN + "dir" os.mkdir(dirname) self.addCleanup(os.rmdir, dirname) @@ -580,7 +583,7 @@ def test_access_denied(self): # os.environ['TEMP'] should be located on a volume that # supports file ACLs. fname = os.path.join(os.environ['TEMP'], self.fname) - self.addCleanup(support.unlink, fname) + self.addCleanup(os_helper.unlink, fname) create_file(fname, b'ABC') # Deny the right to [S]YNCHRONIZE on the file to # force CreateFile to fail with ERROR_ACCESS_DENIED. 
@@ -605,10 +608,10 @@ def test_stat_block_device(self): class UtimeTests(unittest.TestCase): def setUp(self): - self.dirname = support.TESTFN + self.dirname = os_helper.TESTFN self.fname = os.path.join(self.dirname, "f1") - self.addCleanup(support.rmtree, self.dirname) + self.addCleanup(os_helper.rmtree, self.dirname) os.mkdir(self.dirname) create_file(self.fname) @@ -961,7 +964,7 @@ def test_putenv_unsetenv(self): value = "testvalue" code = f'import os; print(repr(os.environ.get({name!r})))' - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env.pop(name, None) os.putenv(name, value) @@ -1132,7 +1135,7 @@ def walk(self, top, **kwargs): def setUp(self): join = os.path.join - self.addCleanup(support.rmtree, support.TESTFN) + self.addCleanup(os_helper.rmtree, os_helper.TESTFN) # Build: # TESTFN/ @@ -1151,7 +1154,7 @@ def setUp(self): # broken_link3 # TEST2/ # tmp4 a lone file - self.walk_path = join(support.TESTFN, "TEST1") + self.walk_path = join(os_helper.TESTFN, "TEST1") self.sub1_path = join(self.walk_path, "SUB1") self.sub11_path = join(self.sub1_path, "SUB11") sub2_path = join(self.walk_path, "SUB2") @@ -1161,8 +1164,8 @@ def setUp(self): tmp3_path = join(sub2_path, "tmp3") tmp5_path = join(sub21_path, "tmp3") self.link_path = join(sub2_path, "link") - t2_path = join(support.TESTFN, "TEST2") - tmp4_path = join(support.TESTFN, "TEST2", "tmp4") + t2_path = join(os_helper.TESTFN, "TEST2") + tmp4_path = join(os_helper.TESTFN, "TEST2", "tmp4") broken_link_path = join(sub2_path, "broken_link") broken_link2_path = join(sub2_path, "broken_link2") broken_link3_path = join(sub2_path, "broken_link3") @@ -1177,7 +1180,7 @@ def setUp(self): with open(path, "x", encoding='utf-8') as f: f.write("I'm " + path + " and proud of it. Blame test_os.\n") - if support.can_symlink(): + if os_helper.can_symlink(): os.symlink(os.path.abspath(t2_path), self.link_path) os.symlink('broken', broken_link_path, True) os.symlink(join('tmp3', 'broken'), broken_link2_path, True) @@ -1260,7 +1263,7 @@ def test_walk_bottom_up(self): self.sub2_tree) def test_walk_symlink(self): - if not support.can_symlink(): + if not os_helper.can_symlink(): self.skipTest("need symlink support") # Walk, following symlinks. 
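The test_putenv_unsetenv hunk above, like the test_mailcap changes earlier, moves EnvironmentVarGuard to os_helper as well. A small usage sketch, with a made-up variable name:

    import os
    from test.support import os_helper

    # All modifications made through env are rolled back when the block exits.
    with os_helper.EnvironmentVarGuard() as env:
        env["SPAM_LEVEL"] = "11"    # hypothetical variable, only set inside the block
        env.unset("http_proxy")     # temporarily drop a proxy setting, if any
        print(os.environ["SPAM_LEVEL"])   # -> 11

    # Outside the block, SPAM_LEVEL and http_proxy are back to their old values.
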
@@ -1296,7 +1299,7 @@ def test_walk_bad_dir(self): def test_walk_many_open_files(self): depth = 30 - base = os.path.join(support.TESTFN, 'deep') + base = os.path.join(os_helper.TESTFN, 'deep') p = os.path.join(base, *(['d']*depth)) os.makedirs(p) @@ -1346,13 +1349,13 @@ def _compare_to_walk(self, walk_kwargs, fwalk_kwargs): self.assertEqual(expected[root], (set(dirs), set(files))) def test_compare_to_walk(self): - kwargs = {'top': support.TESTFN} + kwargs = {'top': os_helper.TESTFN} self._compare_to_walk(kwargs, kwargs) def test_dir_fd(self): try: fd = os.open(".", os.O_RDONLY) - walk_kwargs = {'top': support.TESTFN} + walk_kwargs = {'top': os_helper.TESTFN} fwalk_kwargs = walk_kwargs.copy() fwalk_kwargs['dir_fd'] = fd self._compare_to_walk(walk_kwargs, fwalk_kwargs) @@ -1362,7 +1365,7 @@ def test_dir_fd(self): def test_yields_correct_dir_fd(self): # check returned file descriptors for topdown, follow_symlinks in itertools.product((True, False), repeat=2): - args = support.TESTFN, topdown, None + args = os_helper.TESTFN, topdown, None for root, dirs, files, rootfd in self.fwalk(*args, follow_symlinks=follow_symlinks): # check that the FD is valid os.fstat(rootfd) @@ -1378,7 +1381,7 @@ def test_fd_leak(self): minfd = os.dup(1) os.close(minfd) for i in range(256): - for x in self.fwalk(support.TESTFN): + for x in self.fwalk(os_helper.TESTFN): pass newfd = os.dup(1) self.addCleanup(os.close, newfd) @@ -1416,10 +1419,10 @@ def fwalk(self, top='.', *args, **kwargs): class MakedirTests(unittest.TestCase): def setUp(self): - os.mkdir(support.TESTFN) + os.mkdir(os_helper.TESTFN) def test_makedir(self): - base = support.TESTFN + base = os_helper.TESTFN path = os.path.join(base, 'dir1', 'dir2', 'dir3') os.makedirs(path) # Should work path = os.path.join(base, 'dir1', 'dir2', 'dir3', 'dir4') @@ -1434,8 +1437,8 @@ def test_makedir(self): os.makedirs(path) def test_mode(self): - with support.temp_umask(0o002): - base = support.TESTFN + with os_helper.temp_umask(0o002): + base = os_helper.TESTFN parent = os.path.join(base, 'dir1') path = os.path.join(parent, 'dir2') os.makedirs(path, 0o555) @@ -1446,7 +1449,7 @@ def test_mode(self): self.assertEqual(os.stat(parent).st_mode & 0o777, 0o775) def test_exist_ok_existing_directory(self): - path = os.path.join(support.TESTFN, 'dir1') + path = os.path.join(os_helper.TESTFN, 'dir1') mode = 0o777 old_mask = os.umask(0o022) os.makedirs(path, mode) @@ -1460,18 +1463,18 @@ def test_exist_ok_existing_directory(self): os.makedirs(os.path.abspath('/'), exist_ok=True) def test_exist_ok_s_isgid_directory(self): - path = os.path.join(support.TESTFN, 'dir1') + path = os.path.join(os_helper.TESTFN, 'dir1') S_ISGID = stat.S_ISGID mode = 0o777 old_mask = os.umask(0o022) try: existing_testfn_mode = stat.S_IMODE( - os.lstat(support.TESTFN).st_mode) + os.lstat(os_helper.TESTFN).st_mode) try: - os.chmod(support.TESTFN, existing_testfn_mode | S_ISGID) + os.chmod(os_helper.TESTFN, existing_testfn_mode | S_ISGID) except PermissionError: raise unittest.SkipTest('Cannot set S_ISGID for dir.') - if (os.lstat(support.TESTFN).st_mode & S_ISGID != S_ISGID): + if (os.lstat(os_helper.TESTFN).st_mode & S_ISGID != S_ISGID): raise unittest.SkipTest('No support for S_ISGID dir mode.') # The os should apply S_ISGID from the parent dir for us, but # this test need not depend on that behavior. Be explicit. 
@@ -1487,8 +1490,8 @@ def test_exist_ok_s_isgid_directory(self): os.umask(old_mask) def test_exist_ok_existing_regular_file(self): - base = support.TESTFN - path = os.path.join(support.TESTFN, 'dir1') + base = os_helper.TESTFN + path = os.path.join(os_helper.TESTFN, 'dir1') with open(path, 'w') as f: f.write('abc') self.assertRaises(OSError, os.makedirs, path) @@ -1497,12 +1500,12 @@ def test_exist_ok_existing_regular_file(self): os.remove(path) def tearDown(self): - path = os.path.join(support.TESTFN, 'dir1', 'dir2', 'dir3', + path = os.path.join(os_helper.TESTFN, 'dir1', 'dir2', 'dir3', 'dir4', 'dir5', 'dir6') # If the tests failed, the bottom-most directory ('../dir6') # may not have been created, so we look for the outermost directory # that exists. - while not os.path.exists(path) and path != support.TESTFN: + while not os.path.exists(path) and path != os_helper.TESTFN: path = os.path.dirname(path) os.removedirs(path) @@ -1513,17 +1516,17 @@ class ChownFileTests(unittest.TestCase): @classmethod def setUpClass(cls): - os.mkdir(support.TESTFN) + os.mkdir(os_helper.TESTFN) def test_chown_uid_gid_arguments_must_be_index(self): - stat = os.stat(support.TESTFN) + stat = os.stat(os_helper.TESTFN) uid = stat.st_uid gid = stat.st_gid for value in (-1.0, -1j, decimal.Decimal(-1), fractions.Fraction(-2, 2)): - self.assertRaises(TypeError, os.chown, support.TESTFN, value, gid) - self.assertRaises(TypeError, os.chown, support.TESTFN, uid, value) - self.assertIsNone(os.chown(support.TESTFN, uid, gid)) - self.assertIsNone(os.chown(support.TESTFN, -1, -1)) + self.assertRaises(TypeError, os.chown, os_helper.TESTFN, value, gid) + self.assertRaises(TypeError, os.chown, os_helper.TESTFN, uid, value) + self.assertIsNone(os.chown(os_helper.TESTFN, uid, gid)) + self.assertIsNone(os.chown(os_helper.TESTFN, -1, -1)) @unittest.skipUnless(hasattr(os, 'getgroups'), 'need os.getgroups') def test_chown_gid(self): @@ -1532,61 +1535,61 @@ def test_chown_gid(self): self.skipTest("test needs at least 2 groups") gid_1, gid_2 = groups[:2] - uid = os.stat(support.TESTFN).st_uid + uid = os.stat(os_helper.TESTFN).st_uid - os.chown(support.TESTFN, uid, gid_1) - gid = os.stat(support.TESTFN).st_gid + os.chown(os_helper.TESTFN, uid, gid_1) + gid = os.stat(os_helper.TESTFN).st_gid self.assertEqual(gid, gid_1) - os.chown(support.TESTFN, uid, gid_2) - gid = os.stat(support.TESTFN).st_gid + os.chown(os_helper.TESTFN, uid, gid_2) + gid = os.stat(os_helper.TESTFN).st_gid self.assertEqual(gid, gid_2) @unittest.skipUnless(root_in_posix and len(all_users) > 1, "test needs root privilege and more than one user") def test_chown_with_root(self): uid_1, uid_2 = all_users[:2] - gid = os.stat(support.TESTFN).st_gid - os.chown(support.TESTFN, uid_1, gid) - uid = os.stat(support.TESTFN).st_uid + gid = os.stat(os_helper.TESTFN).st_gid + os.chown(os_helper.TESTFN, uid_1, gid) + uid = os.stat(os_helper.TESTFN).st_uid self.assertEqual(uid, uid_1) - os.chown(support.TESTFN, uid_2, gid) - uid = os.stat(support.TESTFN).st_uid + os.chown(os_helper.TESTFN, uid_2, gid) + uid = os.stat(os_helper.TESTFN).st_uid self.assertEqual(uid, uid_2) @unittest.skipUnless(not root_in_posix and len(all_users) > 1, "test needs non-root account and more than one user") def test_chown_without_permission(self): uid_1, uid_2 = all_users[:2] - gid = os.stat(support.TESTFN).st_gid + gid = os.stat(os_helper.TESTFN).st_gid with self.assertRaises(PermissionError): - os.chown(support.TESTFN, uid_1, gid) - os.chown(support.TESTFN, uid_2, gid) + os.chown(os_helper.TESTFN, uid_1, 
gid) + os.chown(os_helper.TESTFN, uid_2, gid) @classmethod def tearDownClass(cls): - os.rmdir(support.TESTFN) + os.rmdir(os_helper.TESTFN) class RemoveDirsTests(unittest.TestCase): def setUp(self): - os.makedirs(support.TESTFN) + os.makedirs(os_helper.TESTFN) def tearDown(self): - support.rmtree(support.TESTFN) + os_helper.rmtree(os_helper.TESTFN) def test_remove_all(self): - dira = os.path.join(support.TESTFN, 'dira') + dira = os.path.join(os_helper.TESTFN, 'dira') os.mkdir(dira) dirb = os.path.join(dira, 'dirb') os.mkdir(dirb) os.removedirs(dirb) self.assertFalse(os.path.exists(dirb)) self.assertFalse(os.path.exists(dira)) - self.assertFalse(os.path.exists(support.TESTFN)) + self.assertFalse(os.path.exists(os_helper.TESTFN)) def test_remove_partial(self): - dira = os.path.join(support.TESTFN, 'dira') + dira = os.path.join(os_helper.TESTFN, 'dira') os.mkdir(dira) dirb = os.path.join(dira, 'dirb') os.mkdir(dirb) @@ -1594,10 +1597,10 @@ def test_remove_partial(self): os.removedirs(dirb) self.assertFalse(os.path.exists(dirb)) self.assertTrue(os.path.exists(dira)) - self.assertTrue(os.path.exists(support.TESTFN)) + self.assertTrue(os.path.exists(os_helper.TESTFN)) def test_remove_nothing(self): - dira = os.path.join(support.TESTFN, 'dira') + dira = os.path.join(os_helper.TESTFN, 'dira') os.mkdir(dira) dirb = os.path.join(dira, 'dirb') os.mkdir(dirb) @@ -1606,7 +1609,7 @@ def test_remove_nothing(self): os.removedirs(dirb) self.assertTrue(os.path.exists(dirb)) self.assertTrue(os.path.exists(dira)) - self.assertTrue(os.path.exists(support.TESTFN)) + self.assertTrue(os.path.exists(os_helper.TESTFN)) class DevNullTests(unittest.TestCase): @@ -1744,8 +1747,8 @@ def test_urandom_fd_closed(self): def test_urandom_fd_reopened(self): # Issue #21207: urandom() should detect its fd to /dev/urandom # changed to something else, and reopen it. 
- self.addCleanup(support.unlink, support.TESTFN) - create_file(support.TESTFN, b"x" * 256) + self.addCleanup(os_helper.unlink, os_helper.TESTFN) + create_file(os_helper.TESTFN, b"x" * 256) code = """if 1: import os @@ -1771,7 +1774,7 @@ def test_urandom_fd_reopened(self): os.dup2(new_fd, fd) sys.stdout.buffer.write(os.urandom(4)) sys.stdout.buffer.write(os.urandom(4)) - """.format(TESTFN=support.TESTFN) + """.format(TESTFN=os_helper.TESTFN) rc, out, err = assert_python_ok('-Sc', code) self.assertEqual(len(out), 8) self.assertNotEqual(out[0:4], out[4:8]) @@ -1923,36 +1926,36 @@ def test_execve_with_empty_path(self): class Win32ErrorTests(unittest.TestCase): def setUp(self): try: - os.stat(support.TESTFN) + os.stat(os_helper.TESTFN) except FileNotFoundError: exists = False except OSError as exc: exists = True self.fail("file %s must not exist; os.stat failed with %s" - % (support.TESTFN, exc)) + % (os_helper.TESTFN, exc)) else: - self.fail("file %s must not exist" % support.TESTFN) + self.fail("file %s must not exist" % os_helper.TESTFN) def test_rename(self): - self.assertRaises(OSError, os.rename, support.TESTFN, support.TESTFN+".bak") + self.assertRaises(OSError, os.rename, os_helper.TESTFN, os_helper.TESTFN+".bak") def test_remove(self): - self.assertRaises(OSError, os.remove, support.TESTFN) + self.assertRaises(OSError, os.remove, os_helper.TESTFN) def test_chdir(self): - self.assertRaises(OSError, os.chdir, support.TESTFN) + self.assertRaises(OSError, os.chdir, os_helper.TESTFN) def test_mkdir(self): - self.addCleanup(support.unlink, support.TESTFN) + self.addCleanup(os_helper.unlink, os_helper.TESTFN) - with open(support.TESTFN, "x") as f: - self.assertRaises(OSError, os.mkdir, support.TESTFN) + with open(os_helper.TESTFN, "x") as f: + self.assertRaises(OSError, os.mkdir, os_helper.TESTFN) def test_utime(self): - self.assertRaises(OSError, os.utime, support.TESTFN, None) + self.assertRaises(OSError, os.utime, os_helper.TESTFN, None) def test_chmod(self): - self.assertRaises(OSError, os.chmod, support.TESTFN, 0) + self.assertRaises(OSError, os.chmod, os_helper.TESTFN, 0) class TestInvalidFD(unittest.TestCase): @@ -1970,7 +1973,7 @@ def helper(self): def check(self, f, *args): try: - f(support.make_bad_fd(), *args) + f(os_helper.make_bad_fd(), *args) except OSError as e: self.assertEqual(e.errno, errno.EBADF) else: @@ -1979,11 +1982,11 @@ def check(self, f, *args): @unittest.skipUnless(hasattr(os, 'isatty'), 'test needs os.isatty()') def test_isatty(self): - self.assertEqual(os.isatty(support.make_bad_fd()), False) + self.assertEqual(os.isatty(os_helper.make_bad_fd()), False) @unittest.skipUnless(hasattr(os, 'closerange'), 'test needs os.closerange()') def test_closerange(self): - fd = support.make_bad_fd() + fd = os_helper.make_bad_fd() # Make sure none of the descriptors we are about to close are # currently valid (issue 6542). 
for i in range(10): @@ -2057,8 +2060,8 @@ def test_blocking(self): class LinkTests(unittest.TestCase): def setUp(self): - self.file1 = support.TESTFN - self.file2 = os.path.join(support.TESTFN + "2") + self.file1 = os_helper.TESTFN + self.file2 = os.path.join(os_helper.TESTFN + "2") def tearDown(self): for file in (self.file1, self.file2): @@ -2163,12 +2166,12 @@ def test_setregid_neg1(self): @unittest.skipIf(sys.platform == "win32", "Posix specific tests") class Pep383Tests(unittest.TestCase): def setUp(self): - if support.TESTFN_UNENCODABLE: - self.dir = support.TESTFN_UNENCODABLE - elif support.TESTFN_NONASCII: - self.dir = support.TESTFN_NONASCII + if os_helper.TESTFN_UNENCODABLE: + self.dir = os_helper.TESTFN_UNENCODABLE + elif os_helper.TESTFN_NONASCII: + self.dir = os_helper.TESTFN_NONASCII else: - self.dir = support.TESTFN + self.dir = os_helper.TESTFN self.bdir = os.fsencode(self.dir) bytesfn = [] @@ -2178,11 +2181,11 @@ def add_filename(fn): except UnicodeEncodeError: return bytesfn.append(fn) - add_filename(support.TESTFN_UNICODE) - if support.TESTFN_UNENCODABLE: - add_filename(support.TESTFN_UNENCODABLE) - if support.TESTFN_NONASCII: - add_filename(support.TESTFN_NONASCII) + add_filename(os_helper.TESTFN_UNICODE) + if os_helper.TESTFN_UNENCODABLE: + add_filename(os_helper.TESTFN_UNENCODABLE) + if os_helper.TESTFN_NONASCII: + add_filename(os_helper.TESTFN_NONASCII) if not bytesfn: self.skipTest("couldn't create any non-ascii filename") @@ -2190,7 +2193,7 @@ def add_filename(fn): os.mkdir(self.dir) try: for fn in bytesfn: - support.create_empty_file(os.path.join(self.bdir, fn)) + os_helper.create_empty_file(os.path.join(self.bdir, fn)) fn = os.fsdecode(fn) if fn in self.unicodefn: raise ValueError("duplicate filename") @@ -2356,9 +2359,9 @@ def setUp(self): self.created_paths = [] for i in range(2): dir_name = 'SUB%d' % i - dir_path = os.path.join(support.TESTFN, dir_name) + dir_path = os.path.join(os_helper.TESTFN, dir_name) file_name = 'FILE%d' % i - file_path = os.path.join(support.TESTFN, file_name) + file_path = os.path.join(os_helper.TESTFN, file_name) os.makedirs(dir_path) with open(file_path, 'w', encoding='utf-8') as f: f.write("I'm %s and proud of it. 
Blame test_os.\n" % file_path) @@ -2366,31 +2369,31 @@ def setUp(self): self.created_paths.sort() def tearDown(self): - shutil.rmtree(support.TESTFN) + shutil.rmtree(os_helper.TESTFN) def test_listdir_no_extended_path(self): """Test when the path is not an "extended" path.""" # unicode self.assertEqual( - sorted(os.listdir(support.TESTFN)), + sorted(os.listdir(os_helper.TESTFN)), self.created_paths) # bytes self.assertEqual( - sorted(os.listdir(os.fsencode(support.TESTFN))), + sorted(os.listdir(os.fsencode(os_helper.TESTFN))), [os.fsencode(path) for path in self.created_paths]) def test_listdir_extended_path(self): """Test when the path starts with '\\\\?\\'.""" # See: http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx#maxpath # unicode - path = '\\\\?\\' + os.path.abspath(support.TESTFN) + path = '\\\\?\\' + os.path.abspath(os_helper.TESTFN) self.assertEqual( sorted(os.listdir(path)), self.created_paths) # bytes - path = b'\\\\?\\' + os.fsencode(os.path.abspath(support.TESTFN)) + path = b'\\\\?\\' + os.fsencode(os.path.abspath(os_helper.TESTFN)) self.assertEqual( sorted(os.listdir(path)), [os.fsencode(path) for path in self.created_paths]) @@ -2433,32 +2436,32 @@ def test_missing_link(self): self.assertRaises(FileNotFoundError, os.readlink, FakePath('missing-link')) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_pathlike(self): os.symlink(self.filelink_target, self.filelink) - self.addCleanup(support.unlink, self.filelink) + self.addCleanup(os_helper.unlink, self.filelink) filelink = FakePath(self.filelink) self.assertPathEqual(os.readlink(filelink), self.filelink_target) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_pathlike_bytes(self): os.symlink(self.filelinkb_target, self.filelinkb) - self.addCleanup(support.unlink, self.filelinkb) + self.addCleanup(os_helper.unlink, self.filelinkb) path = os.readlink(FakePath(self.filelinkb)) self.assertPathEqual(path, self.filelinkb_target) self.assertIsInstance(path, bytes) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_bytes(self): os.symlink(self.filelinkb_target, self.filelinkb) - self.addCleanup(support.unlink, self.filelinkb) + self.addCleanup(os_helper.unlink, self.filelinkb) path = os.readlink(self.filelinkb) self.assertPathEqual(path, self.filelinkb_target) self.assertIsInstance(path, bytes) @unittest.skipUnless(sys.platform == "win32", "Win32 specific tests") - at support.skip_unless_symlink + at os_helper.skip_unless_symlink class Win32SymlinkTests(unittest.TestCase): filelink = 'filelinktest' filelink_target = os.path.abspath(__file__) @@ -2529,10 +2532,10 @@ def check_stat(self, link, target): self.assertNotEqual(os.lstat(bytes_link), os.stat(bytes_link)) def test_12084(self): - level1 = os.path.abspath(support.TESTFN) + level1 = os.path.abspath(os_helper.TESTFN) level2 = os.path.join(level1, "level2") level3 = os.path.join(level2, "level3") - self.addCleanup(support.rmtree, level1) + self.addCleanup(os_helper.rmtree, level1) os.mkdir(level1) os.mkdir(level2) @@ -2718,7 +2721,7 @@ def test_getfinalpathname_handles(self): self.assertEqual(0, handle_delta) - at support.skip_unless_symlink + at os_helper.skip_unless_symlink class NonLocalSymlinkTests(unittest.TestCase): def setUp(self): @@ -2857,8 +2860,8 @@ class SpawnTests(unittest.TestCase): def create_args(self, *, with_env=False, use_bytes=False): self.exitcode = 17 - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + 
self.addCleanup(os_helper.unlink, filename) if not with_env: code = 'import sys; sys.exit(%s)' % self.exitcode @@ -3009,8 +3012,8 @@ def _test_invalid_env(self, spawn): self.assertEqual(exitcode, 127) # equal character in the environment variable value - filename = support.TESTFN - self.addCleanup(support.unlink, filename) + filename = os_helper.TESTFN + self.addCleanup(os_helper.unlink, filename) with open(filename, "w") as fp: fp.write('import sys, os\n' 'if os.getenv("FRUIT") != "orange=lemon":\n' @@ -3165,12 +3168,12 @@ class TestSendfile(unittest.TestCase): @classmethod def setUpClass(cls): cls.key = threading_helper.threading_setup() - create_file(support.TESTFN, cls.DATA) + create_file(os_helper.TESTFN, cls.DATA) @classmethod def tearDownClass(cls): threading_helper.threading_cleanup(*cls.key) - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) def setUp(self): self.server = SendfileTestServer((socket_helper.HOST, 0)) @@ -3181,7 +3184,7 @@ def setUp(self): # synchronize by waiting for "220 ready" response self.client.recv(1024) self.sockno = self.client.fileno() - self.file = open(support.TESTFN, 'rb') + self.file = open(os_helper.TESTFN, 'rb') self.fileno = self.file.fileno() def tearDown(self): @@ -3313,10 +3316,10 @@ def test_headers(self): @requires_headers_trailers def test_trailers(self): - TESTFN2 = support.TESTFN + "2" + TESTFN2 = os_helper.TESTFN + "2" file_data = b"abcdef" - self.addCleanup(support.unlink, TESTFN2) + self.addCleanup(os_helper.unlink, TESTFN2) create_file(TESTFN2, file_data) with open(TESTFN2, 'rb') as f: @@ -3362,13 +3365,13 @@ def supports_extended_attributes(): return False try: - with open(support.TESTFN, "xb", 0) as fp: + with open(os_helper.TESTFN, "xb", 0) as fp: try: os.setxattr(fp.fileno(), b"user.test", b"") except OSError: return False finally: - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) return True @@ -3380,8 +3383,8 @@ def supports_extended_attributes(): class ExtendedAttributeTests(unittest.TestCase): def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwargs): - fn = support.TESTFN - self.addCleanup(support.unlink, fn) + fn = os_helper.TESTFN + self.addCleanup(os_helper.unlink, fn) create_file(fn) with self.assertRaises(OSError) as cm: @@ -3429,10 +3432,10 @@ def _check_xattrs_str(self, s, getxattr, setxattr, removexattr, listxattr, **kwa def _check_xattrs(self, *args, **kwargs): self._check_xattrs_str(str, *args, **kwargs) - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) self._check_xattrs_str(os.fsencode, *args, **kwargs) - support.unlink(support.TESTFN) + os_helper.unlink(os_helper.TESTFN) def test_simple(self): self._check_xattrs(os.getxattr, os.setxattr, os.removexattr, @@ -3531,16 +3534,16 @@ class Str(str): self.bytes_filenames = [] self.unicode_filenames = [] - if support.TESTFN_UNENCODABLE is not None: - decoded = support.TESTFN_UNENCODABLE + if os_helper.TESTFN_UNENCODABLE is not None: + decoded = os_helper.TESTFN_UNENCODABLE else: - decoded = support.TESTFN + decoded = os_helper.TESTFN self.unicode_filenames.append(decoded) self.unicode_filenames.append(Str(decoded)) - if support.TESTFN_UNDECODABLE is not None: - encoded = support.TESTFN_UNDECODABLE + if os_helper.TESTFN_UNDECODABLE is not None: + encoded = os_helper.TESTFN_UNDECODABLE else: - encoded = os.fsencode(support.TESTFN) + encoded = os.fsencode(os_helper.TESTFN) self.bytes_filenames.append(encoded) self.bytes_filenames.append(bytearray(encoded)) 
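Most of the test_os churn above is mechanical: support.TESTFN, support.unlink and support.rmtree become their os_helper counterparts. A minimal, hypothetical test showing the scratch-file cleanup idiom these hunks rely on:

    import os
    import unittest
    from test.support import os_helper

    class ScratchFileExample(unittest.TestCase):
        def test_roundtrip(self):
            # TESTFN is a per-process scratch filename; os_helper.unlink()
            # ignores a missing file, so registering it as cleanup is safe
            # even if the test fails before the file is created.
            self.addCleanup(os_helper.unlink, os_helper.TESTFN)
            with open(os_helper.TESTFN, "wb") as fp:
                fp.write(b"spam")
            self.assertEqual(os.stat(os_helper.TESTFN).st_size, 4)

    if __name__ == '__main__':
        unittest.main()
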
self.bytes_filenames.append(memoryview(encoded)) @@ -3734,14 +3737,14 @@ class PathTConverterTests(unittest.TestCase): ] def test_path_t_converter(self): - str_filename = support.TESTFN + str_filename = os_helper.TESTFN if os.name == 'nt': bytes_fspath = bytes_filename = None else: - bytes_filename = os.fsencode(support.TESTFN) + bytes_filename = os.fsencode(os_helper.TESTFN) bytes_fspath = FakePath(bytes_filename) fd = os.open(FakePath(str_filename), os.O_WRONLY|os.O_CREAT) - self.addCleanup(support.unlink, support.TESTFN) + self.addCleanup(os_helper.unlink, os_helper.TESTFN) self.addCleanup(os.close, fd) int_fspath = FakePath(fd) @@ -3811,8 +3814,8 @@ def test_os_all(self): class TestDirEntry(unittest.TestCase): def setUp(self): - self.path = os.path.realpath(support.TESTFN) - self.addCleanup(support.rmtree, self.path) + self.path = os.path.realpath(os_helper.TESTFN) + self.addCleanup(os_helper.rmtree, self.path) os.mkdir(self.path) def test_uninstantiable(self): @@ -3828,12 +3831,12 @@ def test_unpickable(self): class TestScandir(unittest.TestCase): - check_no_resource_warning = support.check_no_resource_warning + check_no_resource_warning = warnings_helper.check_no_resource_warning def setUp(self): - self.path = os.path.realpath(support.TESTFN) + self.path = os.path.realpath(os_helper.TESTFN) self.bytes_path = os.fsencode(self.path) - self.addCleanup(support.rmtree, self.path) + self.addCleanup(os_helper.rmtree, self.path) os.mkdir(self.path) def create_file(self, name="file.txt"): @@ -3903,7 +3906,7 @@ def check_entry(self, entry, name, is_dir, is_file, is_symlink): def test_attributes(self): link = hasattr(os, 'link') - symlink = support.can_symlink() + symlink = os_helper.can_symlink() dirname = os.path.join(self.path, "dir") os.mkdir(dirname) @@ -4027,7 +4030,7 @@ def test_removed_file(self): self.assertRaises(FileNotFoundError, entry.stat, follow_symlinks=False) def test_broken_symlink(self): - if not support.can_symlink(): + if not os_helper.can_symlink(): return self.skipTest('cannot create symbolic link') filename = self.create_file("file.txt") @@ -4081,7 +4084,7 @@ def test_fd(self): self.assertIn(os.scandir, os.supports_fd) self.create_file('file.txt') expected_names = ['file.txt'] - if support.can_symlink(): + if os_helper.can_symlink(): os.symlink('file.txt', os.path.join(self.path, 'link')) expected_names.append('link') diff --git a/Lib/test/test_shutil.py b/Lib/test/test_shutil.py index e56b337083c8f..06ca50af14337 100644 --- a/Lib/test/test_shutil.py +++ b/Lib/test/test_shutil.py @@ -30,7 +30,8 @@ posix = None from test import support -from test.support import TESTFN, FakePath +from test.support import os_helper +from test.support.os_helper import TESTFN, FakePath TESTFN2 = TESTFN + "2" MACOS = sys.platform.startswith("darwin") @@ -140,9 +141,9 @@ def supports_file2file_sendfile(): return True finally: if srcname is not None: - support.unlink(srcname) + os_helper.unlink(srcname) if dstname is not None: - support.unlink(dstname) + os_helper.unlink(dstname) SUPPORTS_SENDFILE = supports_file2file_sendfile() @@ -168,7 +169,7 @@ def mkdtemp(self, prefix=None): Returns the path of the directory. 
""" d = tempfile.mkdtemp(prefix=prefix, dir=os.getcwd()) - self.addCleanup(support.rmtree, d) + self.addCleanup(os_helper.rmtree, d) return d @@ -183,7 +184,7 @@ def test_rmtree_works_on_bytes(self): self.assertIsInstance(victim, bytes) shutil.rmtree(victim) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_rmtree_fails_on_symlink(self): tmp = self.mkdtemp() dir_ = os.path.join(tmp, 'dir') @@ -202,7 +203,7 @@ def onerror(*args): self.assertEqual(errors[0][1], link) self.assertIsInstance(errors[0][2][1], OSError) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_rmtree_works_on_symlinks(self): tmp = self.mkdtemp() dir1 = os.path.join(tmp, 'dir1') @@ -231,7 +232,7 @@ def test_rmtree_fails_on_junctions(self): os.mkdir(dir_) link = os.path.join(tmp, 'link') _winapi.CreateJunction(dir_, link) - self.addCleanup(support.unlink, link) + self.addCleanup(os_helper.unlink, link) self.assertRaises(OSError, shutil.rmtree, link) self.assertTrue(os.path.exists(dir_)) self.assertTrue(os.path.lexists(link)) @@ -313,7 +314,7 @@ def test_on_error(self): self.child_file_path = os.path.join(TESTFN, 'a') self.child_dir_path = os.path.join(TESTFN, 'b') - support.create_empty_file(self.child_file_path) + os_helper.create_empty_file(self.child_file_path) os.mkdir(self.child_dir_path) old_dir_mode = os.stat(TESTFN).st_mode old_child_file_mode = os.stat(self.child_file_path).st_mode @@ -407,7 +408,7 @@ def test_rmtree_dont_delete_file(self): self.assertRaises(NotADirectoryError, shutil.rmtree, path) os.remove(path) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_rmtree_on_symlink(self): # bug 1669. os.mkdir(TESTFN) @@ -482,7 +483,7 @@ def test_copytree_dirs_exist_ok(self): with self.assertRaises(FileExistsError): shutil.copytree(src_dir, dst_dir, dirs_exist_ok=False) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copytree_symlinks(self): tmp_dir = self.mkdtemp() src_dir = os.path.join(tmp_dir, 'src') @@ -634,7 +635,7 @@ def test_copytree_retains_permissions(self): write_file((src_dir, 'restrictive.txt'), '456') os.chmod(os.path.join(src_dir, 'restrictive.txt'), 0o600) restrictive_subdir = tempfile.mkdtemp(dir=src_dir) - self.addCleanup(support.rmtree, restrictive_subdir) + self.addCleanup(os_helper.rmtree, restrictive_subdir) os.chmod(restrictive_subdir, 0o600) shutil.copytree(src_dir, dst_dir) @@ -681,7 +682,7 @@ def custom_cpfun(a, b): # Issue #3002: copyfile and copytree block indefinitely on named pipes @unittest.skipUnless(hasattr(os, "mkfifo"), 'requires os.mkfifo()') - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copytree_named_pipe(self): os.mkdir(TESTFN) try: @@ -719,7 +720,7 @@ def _copy(src, dst): shutil.copytree(src_dir, dst_dir, copy_function=_copy) self.assertEqual(len(copied), 2) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copytree_dangling_symlinks(self): # a dangling symlink raises an error at the end src_dir = self.mkdtemp() @@ -739,7 +740,7 @@ def test_copytree_dangling_symlinks(self): shutil.copytree(src_dir, dst_dir, symlinks=True) self.assertIn('test.txt', os.listdir(dst_dir)) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copytree_symlink_dir(self): src_dir = self.mkdtemp() dst_dir = os.path.join(self.mkdtemp(), 'destination') @@ -785,7 +786,7 @@ class TestCopy(BaseTest, unittest.TestCase): ### shutil.copymode - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copymode_follow_symlinks(self): 
tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -818,7 +819,7 @@ def test_copymode_follow_symlinks(self): self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) @unittest.skipUnless(hasattr(os, 'lchmod'), 'requires os.lchmod') - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copymode_symlink_to_symlink(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -848,7 +849,7 @@ def test_copymode_symlink_to_symlink(self): self.assertEqual(os.stat(src).st_mode, os.stat(dst).st_mode) @unittest.skipIf(hasattr(os, 'lchmod'), 'requires os.lchmod to be missing') - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copymode_symlink_to_symlink_wo_lchmod(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -863,7 +864,7 @@ def test_copymode_symlink_to_symlink_wo_lchmod(self): ### shutil.copystat - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copystat_symlinks(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -935,7 +936,7 @@ def _chflags_raiser(path, flags, *, follow_symlinks=True): ### shutil.copyxattr - @support.skip_unless_xattr + @os_helper.skip_unless_xattr def test_copyxattr(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -999,8 +1000,8 @@ def _raise_on_src(fname, *, follow_symlinks=True): self.assertEqual(os.getxattr(dst, 'user.the_value'), b'fiddly') self.assertEqual(os.getxattr(dstro, 'user.the_value'), b'fiddly') - @support.skip_unless_symlink - @support.skip_unless_xattr + @os_helper.skip_unless_symlink + @os_helper.skip_unless_xattr @unittest.skipUnless(hasattr(os, 'geteuid') and os.geteuid() == 0, 'root privileges required') def test_copyxattr_symlinks(self): @@ -1042,7 +1043,7 @@ def test_copy(self): self.assertTrue(os.path.exists(file2)) self.assertEqual(os.stat(file1).st_mode, os.stat(file2).st_mode) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copy_symlinks(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -1084,7 +1085,7 @@ def test_copy2(self): self.assertEqual(getattr(file1_stat, 'st_flags'), getattr(file2_stat, 'st_flags')) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copy2_symlinks(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -1119,7 +1120,7 @@ def test_copy2_symlinks(self): if hasattr(os, 'lchflags') and hasattr(src_link_stat, 'st_flags'): self.assertEqual(src_link_stat.st_flags, dst_stat.st_flags) - @support.skip_unless_xattr + @os_helper.skip_unless_xattr def test_copy2_xattr(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'foo') @@ -1146,7 +1147,7 @@ def test_copy_return_value(self): ### shutil.copyfile - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_copyfile_symlinks(self): tmp_dir = self.mkdtemp() src = os.path.join(tmp_dir, 'src') @@ -1183,7 +1184,7 @@ def test_dont_copy_file_onto_link_to_itself(self): finally: shutil.rmtree(TESTFN, ignore_errors=True) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_dont_copy_file_onto_symlink_to_itself(self): # bug 851123. 
os.mkdir(TESTFN) @@ -1258,7 +1259,7 @@ def test_make_tarball(self): work_dir = os.path.dirname(tmpdir2) rel_base_name = os.path.join(os.path.basename(tmpdir2), 'archive') - with support.change_cwd(work_dir): + with os_helper.change_cwd(work_dir): base_name = os.path.abspath(rel_base_name) tarball = make_archive(rel_base_name, 'gztar', root_dir, '.') @@ -1272,7 +1273,7 @@ def test_make_tarball(self): './file1', './file2', './sub/file3']) # trying an uncompressed one - with support.change_cwd(work_dir): + with os_helper.change_cwd(work_dir): tarball = make_archive(rel_base_name, 'tar', root_dir, '.') self.assertEqual(tarball, base_name + '.tar') self.assertTrue(os.path.isfile(tarball)) @@ -1347,7 +1348,7 @@ def test_make_zipfile(self): work_dir = os.path.dirname(tmpdir2) rel_base_name = os.path.join(os.path.basename(tmpdir2), 'archive') - with support.change_cwd(work_dir): + with os_helper.change_cwd(work_dir): base_name = os.path.abspath(rel_base_name) res = make_archive(rel_base_name, 'zip', root_dir) @@ -1360,7 +1361,7 @@ def test_make_zipfile(self): 'dist/file1', 'dist/file2', 'dist/sub/file3', 'outer']) - with support.change_cwd(work_dir): + with os_helper.change_cwd(work_dir): base_name = os.path.abspath(rel_base_name) res = make_archive(rel_base_name, 'zip', root_dir, base_dir) @@ -1412,7 +1413,7 @@ def test_unzip_zipfile(self): # now check the ZIP file using `unzip -t` zip_cmd = ['unzip', '-t', archive] - with support.change_cwd(root_dir): + with os_helper.change_cwd(root_dir): try: subprocess.check_output(zip_cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as exc: @@ -1462,7 +1463,7 @@ def test_tarfile_root_owner(self): base_name = os.path.join(self.mkdtemp(), 'archive') group = grp.getgrgid(0)[0] owner = pwd.getpwuid(0)[0] - with support.change_cwd(root_dir): + with os_helper.change_cwd(root_dir): archive_name = make_archive(base_name, 'gztar', root_dir, 'dist', owner=owner, group=group) @@ -1496,7 +1497,7 @@ def _breaks(*args, **kw): def test_make_tarfile_in_curdir(self): # Issue #21280 root_dir = self.mkdtemp() - with support.change_cwd(root_dir): + with os_helper.change_cwd(root_dir): self.assertEqual(make_archive('test', 'tar'), 'test.tar') self.assertTrue(os.path.isfile('test.tar')) @@ -1504,7 +1505,7 @@ def test_make_tarfile_in_curdir(self): def test_make_zipfile_in_curdir(self): # Issue #21280 root_dir = self.mkdtemp() - with support.change_cwd(root_dir): + with os_helper.change_cwd(root_dir): self.assertEqual(make_archive('test', 'zip'), 'test.zip') self.assertTrue(os.path.isfile('test.zip')) @@ -1711,18 +1712,18 @@ def test_relative_cmd(self): # that exists, it should be returned. base_dir, tail_dir = os.path.split(self.dir) relpath = os.path.join(tail_dir, self.file) - with support.change_cwd(path=base_dir): + with os_helper.change_cwd(path=base_dir): rv = shutil.which(relpath, path=self.temp_dir) self.assertEqual(rv, relpath) # But it shouldn't be searched in PATH directories (issue #16957). 
- with support.change_cwd(path=self.dir): + with os_helper.change_cwd(path=self.dir): rv = shutil.which(relpath, path=base_dir) self.assertIsNone(rv) def test_cwd(self): # Issue #16957 base_dir = os.path.dirname(self.dir) - with support.change_cwd(path=self.dir): + with os_helper.change_cwd(path=self.dir): rv = shutil.which(self.file, path=base_dir) if sys.platform == "win32": # Windows: current directory implicitly on PATH @@ -1743,7 +1744,7 @@ def test_non_matching_mode(self): def test_relative_path(self): base_dir, tail_dir = os.path.split(self.dir) - with support.change_cwd(path=base_dir): + with os_helper.change_cwd(path=base_dir): rv = shutil.which(self.file, path=tail_dir) self.assertEqual(rv, os.path.join(tail_dir, self.file)) @@ -1761,19 +1762,19 @@ def test_pathext_checking(self): self.assertEqual(rv, self.temp_file.name[:-4] + self.ext) def test_environ_path(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['PATH'] = self.env_path rv = shutil.which(self.file) self.assertEqual(rv, self.temp_file.name) def test_environ_path_empty(self): # PATH='': no match - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['PATH'] = '' with unittest.mock.patch('os.confstr', return_value=self.dir, \ create=True), \ support.swap_attr(os, 'defpath', self.dir), \ - support.change_cwd(self.dir): + os_helper.change_cwd(self.dir): rv = shutil.which(self.file) self.assertIsNone(rv) @@ -1786,7 +1787,7 @@ def test_environ_path_cwd(self): expected_cwd = os.path.join(curdir, expected_cwd) # PATH=':': explicitly looks in the current directory - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['PATH'] = os.pathsep with unittest.mock.patch('os.confstr', return_value=self.dir, \ create=True), \ @@ -1795,12 +1796,12 @@ def test_environ_path_cwd(self): self.assertIsNone(rv) # look in current directory - with support.change_cwd(self.dir): + with os_helper.change_cwd(self.dir): rv = shutil.which(self.file) self.assertEqual(rv, expected_cwd) def test_environ_path_missing(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env.pop('PATH', None) # without confstr @@ -1819,14 +1820,14 @@ def test_environ_path_missing(self): def test_empty_path(self): base_dir = os.path.dirname(self.dir) - with support.change_cwd(path=self.dir), \ - support.EnvironmentVarGuard() as env: + with os_helper.change_cwd(path=self.dir), \ + os_helper.EnvironmentVarGuard() as env: env['PATH'] = self.env_path rv = shutil.which(self.file, path='') self.assertIsNone(rv) def test_empty_path_no_PATH(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env.pop('PATH', None) rv = shutil.which(self.file) self.assertIsNone(rv) @@ -1843,7 +1844,7 @@ def test_pathext(self): program = os.path.basename(temp_filexyz.name) program = os.path.splitext(program)[0] - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['PATHEXT'] = ext rv = shutil.which(program, path=self.temp_dir) self.assertEqual(rv, temp_filexyz.name) @@ -1918,7 +1919,7 @@ def test_move_dir(self): try: self._check_move_dir(self.src_dir, dst_dir, dst_dir) finally: - support.rmtree(dst_dir) + os_helper.rmtree(dst_dir) @mock_rename def test_move_dir_other_fs(self): @@ -1965,7 +1966,7 @@ def test_destinsrc_false_negative(self): msg='_destinsrc() wrongly concluded that ' 'dst (%s) is not in src (%s)' % (dst, src)) finally: - 
support.rmtree(TESTFN) + os_helper.rmtree(TESTFN) def test_destinsrc_false_positive(self): os.mkdir(TESTFN) @@ -1977,9 +1978,9 @@ def test_destinsrc_false_positive(self): msg='_destinsrc() wrongly concluded that ' 'dst (%s) is in src (%s)' % (dst, src)) finally: - support.rmtree(TESTFN) + os_helper.rmtree(TESTFN) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink @mock_rename def test_move_file_symlink(self): dst = os.path.join(self.src_dir, 'bar') @@ -1988,7 +1989,7 @@ def test_move_file_symlink(self): self.assertTrue(os.path.islink(self.dst_file)) self.assertTrue(os.path.samefile(self.src_file, self.dst_file)) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink @mock_rename def test_move_file_symlink_to_dir(self): filename = "bar" @@ -1999,7 +2000,7 @@ def test_move_file_symlink_to_dir(self): self.assertTrue(os.path.islink(final_link)) self.assertTrue(os.path.samefile(self.src_file, final_link)) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink @mock_rename def test_move_dangling_symlink(self): src = os.path.join(self.src_dir, 'baz') @@ -2010,7 +2011,7 @@ def test_move_dangling_symlink(self): self.assertTrue(os.path.islink(dst_link)) self.assertEqual(os.path.realpath(src), os.path.realpath(dst_link)) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink @mock_rename def test_move_dir_symlink(self): src = os.path.join(self.src_dir, 'baz') @@ -2044,8 +2045,8 @@ def test_move_dir_special_function(self): moved = [] def _copy(src, dst): moved.append((src, dst)) - support.create_empty_file(os.path.join(self.src_dir, 'child')) - support.create_empty_file(os.path.join(self.src_dir, 'child1')) + os_helper.create_empty_file(os.path.join(self.src_dir, 'child')) + os_helper.create_empty_file(os.path.join(self.src_dir, 'child1')) shutil.move(self.src_dir, self.dst_dir, copy_function=_copy) self.assertEqual(len(moved), 3) @@ -2167,11 +2168,11 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - support.unlink(TESTFN) - support.unlink(TESTFN2) + os_helper.unlink(TESTFN) + os_helper.unlink(TESTFN2) def tearDown(self): - support.unlink(TESTFN2) + os_helper.unlink(TESTFN2) @contextlib.contextmanager def get_files(self): @@ -2216,7 +2217,7 @@ def test_win_impl(self): with tempfile.NamedTemporaryFile(dir=os.getcwd(), delete=False) as f: f.write(b'foo') fname = f.name - self.addCleanup(support.unlink, fname) + self.addCleanup(os_helper.unlink, fname) with unittest.mock.patch("shutil._copyfileobj_readinto") as m: shutil.copyfile(fname, TESTFN2) self.assertEqual(m.call_args[0][2], 3) @@ -2225,7 +2226,7 @@ def test_win_impl(self): with tempfile.NamedTemporaryFile(dir=os.getcwd(), delete=False) as f: pass fname = f.name - self.addCleanup(support.unlink, fname) + self.addCleanup(os_helper.unlink, fname) with unittest.mock.patch("shutil._copyfileobj_readinto") as m: shutil.copyfile(fname, TESTFN2) assert not m.called @@ -2247,10 +2248,10 @@ def setUpClass(cls): @classmethod def tearDownClass(cls): - support.unlink(TESTFN) + os_helper.unlink(TESTFN) def tearDown(self): - support.unlink(TESTFN2) + os_helper.unlink(TESTFN2) @contextlib.contextmanager def get_files(self): @@ -2296,8 +2297,8 @@ def test_non_existent_src(self): def test_empty_file(self): srcname = TESTFN + 'src' dstname = TESTFN + 'dst' - self.addCleanup(lambda: support.unlink(srcname)) - self.addCleanup(lambda: support.unlink(dstname)) + self.addCleanup(lambda: os_helper.unlink(srcname)) + self.addCleanup(lambda: os_helper.unlink(dstname)) with open(srcname, "wb"): pass @@ -2421,9 +2422,9 @@ 
def test_blocksize_arg(self): # sendfile() are the same. self.assertEqual(blocksize, os.path.getsize(TESTFN)) # ...unless we're dealing with a small file. - support.unlink(TESTFN2) + os_helper.unlink(TESTFN2) write_file(TESTFN2, b"hello", binary=True) - self.addCleanup(support.unlink, TESTFN2 + '3') + self.addCleanup(os_helper.unlink, TESTFN2 + '3') self.assertRaises(ZeroDivisionError, shutil.copyfile, TESTFN2, TESTFN2 + '3') blocksize = m.call_args[0][3] @@ -2473,20 +2474,20 @@ def test_does_not_crash(self): def test_os_environ_first(self): "Check if environment variables have precedence" - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['COLUMNS'] = '777' del env['LINES'] size = shutil.get_terminal_size() self.assertEqual(size.columns, 777) - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: del env['COLUMNS'] env['LINES'] = '888' size = shutil.get_terminal_size() self.assertEqual(size.lines, 888) def test_bad_environ(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: env['COLUMNS'] = 'xxx' env['LINES'] = 'yyy' size = shutil.get_terminal_size() @@ -2510,7 +2511,7 @@ def test_stty_match(self): self.skipTest("stty invocation failed") expected = (int(size[1]), int(size[0])) # reversed order - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: del env['LINES'] del env['COLUMNS'] actual = shutil.get_terminal_size() @@ -2518,7 +2519,7 @@ def test_stty_match(self): self.assertEqual(expected, actual) def test_fallback(self): - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: del env['LINES'] del env['COLUMNS'] diff --git a/Lib/test/test_subprocess.py b/Lib/test/test_subprocess.py index aced87694cf7b..0162424e2fd6b 100644 --- a/Lib/test/test_subprocess.py +++ b/Lib/test/test_subprocess.py @@ -1,6 +1,9 @@ import unittest from unittest import mock from test import support +from test.support import import_helper +from test.support import os_helper +from test.support import warnings_helper import subprocess import sys import signal @@ -20,7 +23,7 @@ import gc import textwrap import json -from test.support import FakePath +from test.support.os_helper import FakePath try: import _testcapi @@ -357,7 +360,7 @@ def _normalize_cwd(self, cwd): # Normalize an expected cwd (for Tru64 support). # We can't use os.path.realpath since it doesn't expand Tru64 {memb} # strings. See bug #1063571. - with support.change_cwd(cwd): + with os_helper.change_cwd(cwd): return os.getcwd() # For use in the test_cwd* tests below. @@ -406,7 +409,7 @@ def test_cwd_with_relative_arg(self): # is relative. python_dir, python_base = self._split_python_path() rel_python = os.path.join(os.curdir, python_base) - with support.temp_cwd() as wrong_dir: + with os_helper.temp_cwd() as wrong_dir: # Before calling with the correct cwd, confirm that the call fails # without cwd and with the wrong cwd. self.assertRaises(FileNotFoundError, subprocess.Popen, @@ -423,7 +426,7 @@ def test_cwd_with_relative_executable(self): python_dir, python_base = self._split_python_path() rel_python = os.path.join(os.curdir, python_base) doesntexist = "somethingyoudonthave" - with support.temp_cwd() as wrong_dir: + with os_helper.temp_cwd() as wrong_dir: # Before calling with the correct cwd, confirm that the call fails # without cwd and with the wrong cwd. 
self.assertRaises(FileNotFoundError, subprocess.Popen, @@ -441,7 +444,7 @@ def test_cwd_with_absolute_arg(self): python_dir, python_base = self._split_python_path() abs_python = os.path.join(python_dir, python_base) rel_python = os.path.join(os.curdir, python_base) - with support.temp_dir() as wrong_dir: + with os_helper.temp_dir() as wrong_dir: # Before calling with an absolute path, confirm that using a # relative path fails. self.assertRaises(FileNotFoundError, subprocess.Popen, @@ -1052,7 +1055,7 @@ def test_no_leaking(self): try: for i in range(max_handles): try: - tmpfile = os.path.join(tmpdir, support.TESTFN) + tmpfile = os.path.join(tmpdir, os_helper.TESTFN) handles.append(os.open(tmpfile, os.O_WRONLY|os.O_CREAT)) except OSError as e: if e.errno != errno.EMFILE: @@ -2881,7 +2884,7 @@ def test_wait_when_sigchild_ignored(self): def test_select_unbuffered(self): # Issue #11459: bufsize=0 should really set the pipes as # unbuffered (and therefore let select() work properly). - select = support.import_module("select") + select = import_helper.import_module("select") p = subprocess.Popen([sys.executable, "-c", 'import sys;' 'sys.stdout.write("apple")'], @@ -2909,7 +2912,7 @@ def test_zombie_fast_process_del(self): self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid - with support.check_warnings(('', ResourceWarning)): + with warnings_helper.check_warnings(('', ResourceWarning)): p = None if mswindows: @@ -2934,7 +2937,7 @@ def test_leak_fast_process_del_killed(self): self.addCleanup(p.stderr.close) ident = id(p) pid = p.pid - with support.check_warnings(('', ResourceWarning)): + with warnings_helper.check_warnings(('', ResourceWarning)): p = None os.kill(pid, signal.SIGKILL) @@ -3288,7 +3291,8 @@ def test_close_fds_with_stdio(self): self.assertIn(b"OSError", stderr) # Check for a warning due to using handle_list and close_fds=False - with support.check_warnings((".*overriding close_fds", RuntimeWarning)): + with warnings_helper.check_warnings((".*overriding close_fds", + RuntimeWarning)): startupinfo = subprocess.STARTUPINFO() startupinfo.lpAttributeList = {"handle_list": handles[:]} p = subprocess.Popen([sys.executable, "-c", diff --git a/Lib/test/test_support.py b/Lib/test/test_support.py index a7d5b1bfe4eaf..b268511844b82 100644 --- a/Lib/test/test_support.py +++ b/Lib/test/test_support.py @@ -12,20 +12,24 @@ import time import unittest from test import support +from test.support import import_helper +from test.support import os_helper from test.support import script_helper from test.support import socket_helper +from test.support import warnings_helper -TESTFN = support.TESTFN +TESTFN = os_helper.TESTFN class TestSupport(unittest.TestCase): def test_import_module(self): - support.import_module("ftplib") - self.assertRaises(unittest.SkipTest, support.import_module, "foo") + import_helper.import_module("ftplib") + self.assertRaises(unittest.SkipTest, + import_helper.import_module, "foo") def test_import_fresh_module(self): - support.import_fresh_module("ftplib") + import_helper.import_fresh_module("ftplib") def test_get_attribute(self): self.assertEqual(support.get_attribute(self, "test_get_attribute"), @@ -39,38 +43,38 @@ def test_get_original_stdout(self): def test_unload(self): import sched self.assertIn("sched", sys.modules) - support.unload("sched") + import_helper.unload("sched") self.assertNotIn("sched", sys.modules) def test_unlink(self): with open(TESTFN, "w") as f: pass - support.unlink(TESTFN) + os_helper.unlink(TESTFN) self.assertFalse(os.path.exists(TESTFN)) - 
support.unlink(TESTFN) + os_helper.unlink(TESTFN) def test_rmtree(self): - dirpath = support.TESTFN + 'd' + dirpath = os_helper.TESTFN + 'd' subdirpath = os.path.join(dirpath, 'subdir') os.mkdir(dirpath) os.mkdir(subdirpath) - support.rmtree(dirpath) + os_helper.rmtree(dirpath) self.assertFalse(os.path.exists(dirpath)) with support.swap_attr(support, 'verbose', 0): - support.rmtree(dirpath) + os_helper.rmtree(dirpath) os.mkdir(dirpath) os.mkdir(subdirpath) os.chmod(dirpath, stat.S_IRUSR|stat.S_IXUSR) with support.swap_attr(support, 'verbose', 0): - support.rmtree(dirpath) + os_helper.rmtree(dirpath) self.assertFalse(os.path.exists(dirpath)) os.mkdir(dirpath) os.mkdir(subdirpath) os.chmod(dirpath, 0) with support.swap_attr(support, 'verbose', 0): - support.rmtree(dirpath) + os_helper.rmtree(dirpath) self.assertFalse(os.path.exists(dirpath)) def test_forget(self): @@ -83,12 +87,12 @@ def test_forget(self): mod = __import__(TESTFN) self.assertIn(TESTFN, sys.modules) - support.forget(TESTFN) + import_helper.forget(TESTFN) self.assertNotIn(TESTFN, sys.modules) finally: del sys.path[0] - support.unlink(mod_filename) - support.rmtree('__pycache__') + os_helper.unlink(mod_filename) + os_helper.rmtree('__pycache__') def test_HOST(self): s = socket.create_server((socket_helper.HOST, 0)) @@ -115,23 +119,23 @@ def test_temp_dir(self): try: path = os.path.join(parent_dir, 'temp') self.assertFalse(os.path.isdir(path)) - with support.temp_dir(path) as temp_path: + with os_helper.temp_dir(path) as temp_path: self.assertEqual(temp_path, path) self.assertTrue(os.path.isdir(path)) self.assertFalse(os.path.isdir(path)) finally: - support.rmtree(parent_dir) + os_helper.rmtree(parent_dir) def test_temp_dir__path_none(self): """Test passing no path.""" - with support.temp_dir() as temp_path: + with os_helper.temp_dir() as temp_path: self.assertTrue(os.path.isdir(temp_path)) self.assertFalse(os.path.isdir(temp_path)) def test_temp_dir__existing_dir__quiet_default(self): """Test passing a directory that already exists.""" def call_temp_dir(path): - with support.temp_dir(path) as temp_path: + with os_helper.temp_dir(path) as temp_path: raise Exception("should not get here") path = tempfile.mkdtemp() @@ -150,8 +154,8 @@ def test_temp_dir__existing_dir__quiet_true(self): path = os.path.realpath(path) try: - with support.check_warnings() as recorder: - with support.temp_dir(path, quiet=True) as temp_path: + with warnings_helper.check_warnings() as recorder: + with os_helper.temp_dir(path, quiet=True) as temp_path: self.assertEqual(path, temp_path) warnings = [str(w.message) for w in recorder.warnings] # Make sure temp_dir did not delete the original directory. 
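The surrounding hunks all belong to the same refactoring: helpers such as TESTFN, temp_dir(), change_cwd(), rmtree() and check_warnings() move out of the flat test.support namespace into the new test.support.os_helper, test.support.import_helper and test.support.warnings_helper submodules. A minimal compatibility shim, hypothetical and not part of these commits, for test code that has to import the helpers on both the old and the new layout could look like this:

    # Hypothetical sketch: prefer the split helper modules introduced by this
    # refactoring, and fall back to the old flat test.support layout.
    try:
        from test.support import os_helper, import_helper, warnings_helper
    except ImportError:
        # Older CPython: the helpers still live directly in test.support.
        from test import support as os_helper
        import_helper = warnings_helper = os_helper

    TESTFN = os_helper.TESTFN                         # valid on both layouts
    check_warnings = warnings_helper.check_warnings   # ditto
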
@@ -173,7 +177,8 @@ def test_temp_dir__forked_child(self): script_helper.assert_python_ok("-c", textwrap.dedent(""" import os from test import support - with support.temp_cwd() as temp_path: + from test.support import os_helper + with os_helper.temp_cwd() as temp_path: pid = os.fork() if pid != 0: # parent process @@ -194,8 +199,8 @@ def test_temp_dir__forked_child(self): def test_change_cwd(self): original_cwd = os.getcwd() - with support.temp_dir() as temp_path: - with support.change_cwd(temp_path) as new_cwd: + with os_helper.temp_dir() as temp_path: + with os_helper.change_cwd(temp_path) as new_cwd: self.assertEqual(new_cwd, temp_path) self.assertEqual(os.getcwd(), new_cwd) @@ -206,10 +211,10 @@ def test_change_cwd__non_existent_dir(self): original_cwd = os.getcwd() def call_change_cwd(path): - with support.change_cwd(path) as new_cwd: + with os_helper.change_cwd(path) as new_cwd: raise Exception("should not get here") - with support.temp_dir() as parent_dir: + with os_helper.temp_dir() as parent_dir: non_existent_dir = os.path.join(parent_dir, 'does_not_exist') self.assertRaises(FileNotFoundError, call_change_cwd, non_existent_dir) @@ -220,10 +225,10 @@ def test_change_cwd__non_existent_dir__quiet_true(self): """Test passing a non-existent directory with quiet=True.""" original_cwd = os.getcwd() - with support.temp_dir() as parent_dir: + with os_helper.temp_dir() as parent_dir: bad_dir = os.path.join(parent_dir, 'does_not_exist') - with support.check_warnings() as recorder: - with support.change_cwd(bad_dir, quiet=True) as new_cwd: + with warnings_helper.check_warnings() as recorder: + with os_helper.change_cwd(bad_dir, quiet=True) as new_cwd: self.assertEqual(new_cwd, original_cwd) self.assertEqual(os.getcwd(), new_cwd) warnings = [str(w.message) for w in recorder.warnings] @@ -240,8 +245,8 @@ def test_change_cwd__non_existent_dir__quiet_true(self): def test_change_cwd__chdir_warning(self): """Check the warning message when os.chdir() fails.""" path = TESTFN + '_does_not_exist' - with support.check_warnings() as recorder: - with support.change_cwd(path=path, quiet=True): + with warnings_helper.check_warnings() as recorder: + with os_helper.change_cwd(path=path, quiet=True): pass messages = [str(w.message) for w in recorder.warnings] @@ -256,7 +261,7 @@ def test_change_cwd__chdir_warning(self): def test_temp_cwd(self): here = os.getcwd() - with support.temp_cwd(name=TESTFN): + with os_helper.temp_cwd(name=TESTFN): self.assertEqual(os.path.basename(os.getcwd()), TESTFN) self.assertFalse(os.path.exists(TESTFN)) self.assertEqual(os.getcwd(), here) @@ -265,7 +270,7 @@ def test_temp_cwd(self): def test_temp_cwd__name_none(self): """Test passing None to temp_cwd().""" original_cwd = os.getcwd() - with support.temp_cwd(name=None) as new_cwd: + with os_helper.temp_cwd(name=None) as new_cwd: self.assertNotEqual(new_cwd, original_cwd) self.assertTrue(os.path.isdir(new_cwd)) self.assertEqual(os.getcwd(), new_cwd) @@ -275,7 +280,7 @@ def test_sortdict(self): self.assertEqual(support.sortdict({3:3, 2:2, 1:1}), "{1: 1, 2: 2, 3: 3}") def test_make_bad_fd(self): - fd = support.make_bad_fd() + fd = os_helper.make_bad_fd() with self.assertRaises(OSError) as cm: os.write(fd, b"foo") self.assertEqual(cm.exception.errno, errno.EBADF) @@ -287,11 +292,11 @@ def test_check_syntax_error(self): def test_CleanImport(self): import importlib - with support.CleanImport("asyncore"): + with import_helper.CleanImport("asyncore"): importlib.import_module("asyncore") def test_DirsOnSysPath(self): - with 
support.DirsOnSysPath('foo', 'bar'): + with import_helper.DirsOnSysPath('foo', 'bar'): self.assertIn("foo", sys.path) self.assertIn("bar", sys.path) self.assertNotIn("foo", sys.path) @@ -625,10 +630,10 @@ def test_fd_count(self): # We cannot test the absolute value of fd_count(): on old Linux # kernel or glibc versions, os.urandom() keeps a FD open on # /dev/urandom device and Python has 4 FD opens instead of 3. - start = support.fd_count() + start = os_helper.fd_count() fd = os.open(__file__, os.O_RDONLY) try: - more = support.fd_count() + more = os_helper.fd_count() finally: os.close(fd) self.assertEqual(more - start, 1) diff --git a/Lib/test/test_tempfile.py b/Lib/test/test_tempfile.py index fcc706ede5aaa..8ace883d74bb2 100644 --- a/Lib/test/test_tempfile.py +++ b/Lib/test/test_tempfile.py @@ -15,7 +15,9 @@ import unittest from test import support +from test.support import os_helper from test.support import script_helper +from test.support import warnings_helper has_textmode = (tempfile._text_openflags != tempfile._bin_openflags) @@ -69,7 +71,7 @@ class BaseTestCase(unittest.TestCase): b_check = re.compile(br"^[a-z0-9_-]{8}$") def setUp(self): - self._warnings_manager = support.check_warnings() + self._warnings_manager = warnings_helper.check_warnings() self._warnings_manager.__enter__() warnings.filterwarnings("ignore", category=RuntimeWarning, message="mktemp", module=__name__) @@ -224,7 +226,7 @@ def test_wanted_dirs(self): # _candidate_tempdir_list contains the expected directories # Make sure the interesting environment variables are all set. - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: for envname in 'TMPDIR', 'TEMP', 'TMP': dirname = os.getenv(envname) if not dirname: @@ -310,7 +312,7 @@ def _inside_empty_temp_dir(): with support.swap_attr(tempfile, 'tempdir', dir): yield finally: - support.rmtree(dir) + os_helper.rmtree(dir) def _mock_candidate_names(*names): @@ -594,13 +596,13 @@ def test_case_sensitive(self): case_sensitive_tempdir = tempfile.mkdtemp("-Temp") _tempdir, tempfile.tempdir = tempfile.tempdir, None try: - with support.EnvironmentVarGuard() as env: + with os_helper.EnvironmentVarGuard() as env: # Fake the first env var which is checked as a candidate env["TMPDIR"] = case_sensitive_tempdir self.assertEqual(tempfile.gettempdir(), case_sensitive_tempdir) finally: tempfile.tempdir = _tempdir - support.rmdir(case_sensitive_tempdir) + os_helper.rmdir(case_sensitive_tempdir) class TestMkstemp(BaseTestCase): @@ -950,7 +952,7 @@ def close(fd): def test_bad_mode(self): dir = tempfile.mkdtemp() - self.addCleanup(support.rmtree, dir) + self.addCleanup(os_helper.rmtree, dir) with self.assertRaises(ValueError): tempfile.NamedTemporaryFile(mode='wr', dir=dir) with self.assertRaises(TypeError): @@ -1351,7 +1353,7 @@ def test_explicit_cleanup(self): finally: os.rmdir(dir) - @support.skip_unless_symlink + @os_helper.skip_unless_symlink def test_cleanup_with_symlink_to_a_directory(self): # cleanup() should not follow symlinks to directories (issue #12464) d1 = self.do_create() @@ -1448,7 +1450,9 @@ def test_warnings_on_cleanup(self): name = d.name # Check for the resource warning - with support.check_warnings(('Implicitly', ResourceWarning), quiet=False): + with warnings_helper.check_warnings(('Implicitly', + ResourceWarning), + quiet=False): warnings.filterwarnings("always", category=ResourceWarning) del d support.gc_collect() From webhook-mailer at python.org Tue Jun 30 14:59:05 2020 From: webhook-mailer at python.org (Stefan Krah) 
Date: Tue, 30 Jun 2020 18:59:05 -0000 Subject: [Python-checkins] bpo-41161 Add news entry for libmpdec-2.5.0 (GH-21243) Message-ID: https://github.com/python/cpython/commit/1648c99932f39f1c60972bb114e6a7bd65523818 commit: 1648c99932f39f1c60972bb114e6a7bd65523818 branch: master author: Stefan Krah committer: GitHub date: 2020-06-30T20:58:57+02:00 summary: bpo-41161 Add news entry for libmpdec-2.5.0 (GH-21243) files: A Misc/NEWS.d/next/Library/2020-06-30-20-50-51.bpo-41161.QTdJjz.rst diff --git a/Misc/NEWS.d/next/Library/2020-06-30-20-50-51.bpo-41161.QTdJjz.rst b/Misc/NEWS.d/next/Library/2020-06-30-20-50-51.bpo-41161.QTdJjz.rst new file mode 100644 index 0000000000000..0d8fb521bad50 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-30-20-50-51.bpo-41161.QTdJjz.rst @@ -0,0 +1,2 @@ +The decimal module now requires libmpdec-2.5.0. Users of +--with-system-libmpdec should update their system library. From webhook-mailer at python.org Tue Jun 30 15:20:26 2020 From: webhook-mailer at python.org (Tapas Kundu) Date: Tue, 30 Jun 2020 19:20:26 -0000 Subject: [Python-checkins] [3.7] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) (GH-21231) Message-ID: https://github.com/python/cpython/commit/b98e7790c77a4378ec4b1c71b84138cb930b69b7 commit: b98e7790c77a4378ec4b1c71b84138cb930b69b7 branch: 3.7 author: Tapas Kundu <39723251+tapakund at users.noreply.github.com> committer: GitHub date: 2020-06-30T15:20:21-04:00 summary: [3.7] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) (GH-21231) CVE-2020-14422 The __hash__() methods of classes IPv4Interface and IPv6Interface had issue of generating constant hash values of 32 and 128 respectively causing hash collisions. The fix uses the hash() function to generate hash values for the objects instead of XOR operation (cherry picked from commit b30ee26e366bf509b7538d79bfec6c6d38d53f28) Co-authored-by: Ravi Teja P Signed-off-by: Tapas Kundu files: A Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 80249288d73ab..54882934c3dc1 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1442,7 +1442,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ @@ -2088,7 +2088,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 455b893fb126f..1fb6a929dc2d9 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -2091,6 +2091,17 @@ def testsixtofour(self): sixtofouraddr.sixtofour) self.assertFalse(bad_addr.sixtofour) + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV4HashIsNotConstant(self): + ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4") + ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5") + self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__()) + + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV6HashIsNotConstant(self): + ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1") + ipv6_address2 = 
ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2") + self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__()) if __name__ == '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst new file mode 100644 index 0000000000000..f5a9db52fff52 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst @@ -0,0 +1 @@ +CVE-2020-14422: The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and 128 respectively. This resulted in always causing hash collisions. The fix uses hash() to generate hash values for the tuple of (address, mask length, network address). From webhook-mailer at python.org Tue Jun 30 15:30:30 2020 From: webhook-mailer at python.org (Tapas Kundu) Date: Tue, 30 Jun 2020 19:30:30 -0000 Subject: [Python-checkins] [3.6] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) (GH-21232) Message-ID: https://github.com/python/cpython/commit/cfc7ff8d05f7a949a88b8a8dd506fb5c1c30d3e9 commit: cfc7ff8d05f7a949a88b8a8dd506fb5c1c30d3e9 branch: 3.6 author: Tapas Kundu <39723251+tapakund at users.noreply.github.com> committer: GitHub date: 2020-06-30T15:30:22-04:00 summary: [3.6] bpo-41004: Resolve hash collisions for IPv4Interface and IPv6Interface (GH-21033) (GH-21232) CVE-2020-14422 The __hash__() methods of classes IPv4Interface and IPv6Interface had issue of generating constant hash values of 32 and 128 respectively causing hash collisions. The fix uses the hash() function to generate hash values for the objects instead of XOR operation (cherry picked from commit b30ee26e366bf509b7538d79bfec6c6d38d53f28) Co-authored-by: Ravi Teja P Signed-off-by: Tapas Kundu files: A Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst M Lib/ipaddress.py M Lib/test/test_ipaddress.py diff --git a/Lib/ipaddress.py b/Lib/ipaddress.py index 583f02ad54275..98492136ca5f4 100644 --- a/Lib/ipaddress.py +++ b/Lib/ipaddress.py @@ -1418,7 +1418,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ @@ -2092,7 +2092,7 @@ def __lt__(self, other): return False def __hash__(self): - return self._ip ^ self._prefixlen ^ int(self.network.network_address) + return hash((self._ip, self._prefixlen, int(self.network.network_address))) __reduce__ = _IPAddressBase.__reduce__ diff --git a/Lib/test/test_ipaddress.py b/Lib/test/test_ipaddress.py index 1cef4217bc883..7de444af4aa57 100644 --- a/Lib/test/test_ipaddress.py +++ b/Lib/test/test_ipaddress.py @@ -1990,6 +1990,17 @@ def testsixtofour(self): sixtofouraddr.sixtofour) self.assertFalse(bad_addr.sixtofour) + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV4HashIsNotConstant(self): + ipv4_address1 = ipaddress.IPv4Interface("1.2.3.4") + ipv4_address2 = ipaddress.IPv4Interface("2.3.4.5") + self.assertNotEqual(ipv4_address1.__hash__(), ipv4_address2.__hash__()) + + # issue41004 Hash collisions in IPv4Interface and IPv6Interface + def testV6HashIsNotConstant(self): + ipv6_address1 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:1") + ipv6_address2 = ipaddress.IPv6Interface("2001:658:22a:cafe:200:0:0:2") + self.assertNotEqual(ipv6_address1.__hash__(), ipv6_address2.__hash__()) if __name__ 
== '__main__': unittest.main() diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst new file mode 100644 index 0000000000000..f5a9db52fff52 --- /dev/null +++ b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst @@ -0,0 +1 @@ +CVE-2020-14422: The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and 128 respectively. This resulted in always causing hash collisions. The fix uses hash() to generate hash values for the tuple of (address, mask length, network address). From webhook-mailer at python.org Fri Jun 5 15:56:40 2020 From: webhook-mailer at python.org (Furkan =?utf-8?q?=C3=96nder?=) Date: Fri, 05 Jun 2020 19:56:40 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-19468: delete unnecessary instance check in importlib.reload() (GH-19424) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/fef1fae9df3b03510f9defb25bd0388135b4= c591 commit: fef1fae9df3b03510f9defb25bd0388135b4c591 branch: master author: Furkan =C3=96nder committer: GitHub date: 2020-06-05T12:56:32-07:00 summary: bpo-19468: delete unnecessary instance check in importlib.reload() (GH-19424) Automerge-Triggered-By: @brettcannon files: A Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-18-35.bpo-19468.S-TA7p.rst M Lib/importlib/__init__.py diff --git a/Lib/importlib/__init__.py b/Lib/importlib/__init__.py index 0c73c505f98db..bea37d766262f 100644 --- a/Lib/importlib/__init__.py +++ b/Lib/importlib/__init__.py @@ -54,7 +54,6 @@ # Fully bootstrapped at this point, import whatever you like, circular # dependencies and startup overhead minimisation permitting :) =20 -import types import warnings =20 =20 @@ -136,12 +135,13 @@ def reload(module): The module must have been successfully imported before. =20 """ - if not module or not isinstance(module, types.ModuleType): - raise TypeError("reload() argument must be a module") try: name =3D module.__spec__.name except AttributeError: - name =3D module.__name__ + try: + name =3D module.__name__ + except AttributeError: + raise TypeError("reload() argument must be a module") =20 if sys.modules.get(name) is not module: msg =3D "module {} not in sys.modules" diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-18-35.bpo-19468= .S-TA7p.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-18-35.bpo-1946= 8.S-TA7p.rst new file mode 100644 index 0000000000000..e35750e37f4da --- /dev/null +++ b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-23-18-35.bpo-19468.S-TA7p= .rst=09 @@ -0,0 +1,2 @@ +Delete unnecessary instance check in importlib.reload(). +Patch by Furkan =C3=96nder. 
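The bpo-19468 change above removes the isinstance(module, types.ModuleType) guard from importlib.reload(): the module name is now taken from module.__spec__.name, with module.__name__ as a fallback, and TypeError is raised only when neither attribute is available. A short illustration of the resulting behaviour, assuming a checkout with this commit applied:

    import importlib
    import json

    # An object with neither __spec__ nor __name__ still produces the old error.
    try:
        importlib.reload(object())
    except TypeError as exc:
        print(exc)        # reload() argument must be a module

    # A regular imported module resolves its name via __spec__.name and reloads.
    importlib.reload(json)
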
From webhook-mailer at python.org Fri Jun 5 18:00:49 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Fri, 05 Jun 2020 22:00:49 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40862: Raise TypeError when const is given to argparse.BooleanOptionalAction (GH-20623) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/b084d1b97e369293d2d2bc0791e2135822c9= 23a8 commit: b084d1b97e369293d2d2bc0791e2135822c923a8 branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-06-05T15:00:42-07:00 summary: bpo-40862: Raise TypeError when const is given to argparse.BooleanOptionalAct= ion (GH-20623) files: M Lib/argparse.py M Lib/test/test_argparse.py diff --git a/Lib/argparse.py b/Lib/argparse.py index 2677ef63e9e54..2fb1da59f942c 100644 --- a/Lib/argparse.py +++ b/Lib/argparse.py @@ -857,7 +857,6 @@ class BooleanOptionalAction(Action): def __init__(self, option_strings, dest, - const=3DNone, default=3DNone, type=3DNone, choices=3DNone, diff --git a/Lib/test/test_argparse.py b/Lib/test/test_argparse.py index e82a0c39c21a8..22cae626ccc29 100644 --- a/Lib/test/test_argparse.py +++ b/Lib/test/test_argparse.py @@ -700,6 +700,14 @@ class TestBooleanOptionalAction(ParserTestCase): ('--no-foo --foo', NS(foo=3DTrue)), ] =20 + def test_const(self): + # See bpo-40862 + parser =3D argparse.ArgumentParser() + with self.assertRaises(TypeError) as cm: + parser.add_argument('--foo', const=3DTrue, action=3Dargparse.Boo= leanOptionalAction) + + self.assertIn("got an unexpected keyword argument 'const'", str(cm.e= xception)) + class TestBooleanOptionalActionRequired(ParserTestCase): """Tests BooleanOptionalAction required""" =20 From webhook-mailer at python.org Sun Jun 7 03:05:40 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Sun, 07 Jun 2020 07:05:40 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Fix return type of test helper function heapctypewithbuffer_releasebuffer() (GH-20685) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/b8867e5d5aca33511942632b5f4e359b9245= b2fa commit: b8867e5d5aca33511942632b5f4e359b9245b2fa branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-06-07T09:05:33+02:00 summary: Fix return type of test helper function heapctypewithbuffer_releasebuffer() (= GH-20685) files: M Modules/_testcapimodule.c diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c index d6a90b807d026..e0457ae5dfa55 100644 --- a/Modules/_testcapimodule.c +++ b/Modules/_testcapimodule.c @@ -6318,7 +6318,7 @@ heapctypewithbuffer_getbuffer(HeapCTypeWithBufferObject= *self, Py_buffer *view, view, (PyObject*)self, (void *)self->buffer, 4, 1, flags); } =20 -static int +static void heapctypewithbuffer_releasebuffer(HeapCTypeWithBufferObject *self, Py_buffer= *view) { assert(view->obj =3D=3D (void*) self); From webhook-mailer at python.org Tue Jun 9 21:02:16 2020 From: webhook-mailer at python.org (=?utf-8?q?=C3=89ric?= Araujo) Date: Wed, 10 Jun 2020 01:02:16 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-34003: Re-add versionchanged entry in csv docs (GH-20657) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 
https://github.com/python/cpython/commit/7aed0524d4129766a6032326949ef7f91f6f= 6dfc commit: 7aed0524d4129766a6032326949ef7f91f6f6dfc branch: master author: =C3=89ric Araujo committer: GitHub date: 2020-06-09T18:02:11-07:00 summary: bpo-34003: Re-add versionchanged entry in csv docs (GH-20657) Follow-up to GH-8014 files: M Doc/library/csv.rst diff --git a/Doc/library/csv.rst b/Doc/library/csv.rst index 61d39828e0194..7a72c26d5bade 100644 --- a/Doc/library/csv.rst +++ b/Doc/library/csv.rst @@ -167,6 +167,9 @@ The :mod:`csv` module defines the following classes: All other optional or keyword arguments are passed to the underlying :class:`reader` instance. =20 + .. versionchanged:: 3.6 + Returned rows are now of type :class:`OrderedDict`. + .. versionchanged:: 3.8 Returned rows are now of type :class:`dict`. =20 From webhook-mailer at python.org Fri Jun 12 08:54:34 2020 From: webhook-mailer at python.org (=?utf-8?q?G=C3=A9ry?= Ogam) Date: Fri, 12 Jun 2020 12:54:34 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Update lexical_analysis.rst (GH-17508) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/e2fb8a2c42ee60c72a40d93da69e9efc4e35= 9023 commit: e2fb8a2c42ee60c72a40d93da69e9efc4e359023 branch: master author: G=C3=A9ry Ogam committer: GitHub date: 2020-06-12T05:54:29-07:00 summary: Update lexical_analysis.rst (GH-17508) Use Sphinx role markup for `str.format`. Automerge-Triggered-By: @csabella files: M Doc/reference/lexical_analysis.rst diff --git a/Doc/reference/lexical_analysis.rst b/Doc/reference/lexical_analy= sis.rst index e3a3a88757ed2..7d70cbcaa372a 100644 --- a/Doc/reference/lexical_analysis.rst +++ b/Doc/reference/lexical_analysis.rst @@ -704,7 +704,7 @@ Top-level format specifiers may include nested replacemen= t fields. These nested fields may include their own conversion fields and :ref:`format specifiers `, but may not include more deeply-nested replacement fields. The :ref:`format specifier mini-language ` is the same as that used = by -the string .format() method. +the :meth:`str.format` method. =20 Formatted string literals may be concatenated, but replacement fields cannot be split across literals. From webhook-mailer at python.org Mon Jun 15 04:03:25 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Mon, 15 Jun 2020 08:03:25 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-40836: Add docstring to logging.fatal() and logging.Logger.fatal() (GH-20563) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/25f38d7044a3a47465edd851c4e04f337b2c= 4b9b commit: 25f38d7044a3a47465edd851c4e04f337b2c4b9b branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-06-15T01:03:07-07:00 summary: bpo-40836: Add docstring to logging.fatal() and logging.Logger.fatal() (GH-20= 563) Automerge-Triggered-By: @vsajip files: M Lib/logging/__init__.py diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py index 6d27301a7056e..1c446fd421650 100644 --- a/Lib/logging/__init__.py +++ b/Lib/logging/__init__.py @@ -1480,7 +1480,11 @@ def critical(self, msg, *args, **kwargs): if self.isEnabledFor(CRITICAL): self._log(CRITICAL, msg, args, **kwargs) =20 - fatal =3D critical + def fatal(self, msg, *args, **kwargs): + """ + Don't use this method, use critical() instead. 
+ """ + self.critical(msg, *args, **kwargs) =20 def log(self, level, msg, *args, **kwargs): """ @@ -2039,7 +2043,11 @@ def critical(msg, *args, **kwargs): basicConfig() root.critical(msg, *args, **kwargs) =20 -fatal =3D critical +def fatal(msg, *args, **kwargs): + """ + Don't use this function, use critical() instead. + """ + critical(msg, *args, **kwargs) =20 def error(msg, *args, **kwargs): """ From webhook-mailer at python.org Fri Jun 19 07:57:40 2020 From: webhook-mailer at python.org (=?utf-8?q?J=C3=BCrgen?= Gmach) Date: Fri, 19 Jun 2020 11:57:40 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Improve readability of `formataddr` docstring (GH-20963) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/66a65ba43cb3e68a43e32469c988dd7a6cff= 049c commit: 66a65ba43cb3e68a43e32469c988dd7a6cff049c branch: master author: J=C3=BCrgen Gmach committer: GitHub date: 2020-06-19T04:57:30-07:00 summary: Improve readability of `formataddr` docstring (GH-20963) For me as a non native English speaker, the sentence with its embedded clause= was very hard to understand. modified: Lib/email/utils.py Automerge-Triggered-By: @csabella files: M Lib/email/utils.py diff --git a/Lib/email/utils.py b/Lib/email/utils.py index b137ce3973a4b..1a7719dbc4898 100644 --- a/Lib/email/utils.py +++ b/Lib/email/utils.py @@ -81,7 +81,7 @@ def formataddr(pair, charset=3D'utf-8'): If the first element of pair is false, then the second element is returned unmodified. =20 - Optional charset if given is the character set that is used to encode + The optional charset is the character set that is used to encode realname in case realname is not ASCII safe. Can be an instance of str = or a Charset-like object which has a header_encode method. Default is 'utf-8'. From webhook-mailer at python.org Sat Jun 20 08:55:10 2020 From: webhook-mailer at python.org (Vincent =?utf-8?q?F=C3=A9rotin?=) Date: Sat, 20 Jun 2020 12:55:10 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-41024: doc: Explicitly mention use of 'enum.Enum' as a valid container for =?utf-8?q?=27=E2=80=A6?= (GH-20964) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/344c2a75c1c13de781962a3f80552e66a4c8= 9024 commit: 344c2a75c1c13de781962a3f80552e66a4c89024 branch: master author: Vincent F=C3=A9rotin committer: GitHub date: 2020-06-20T05:55:05-07:00 summary: bpo-41024: doc: Explicitly mention use of 'enum.Enum' as a valid container fo= r '=E2=80=A6 (GH-20964) =E2=80=A6choices' argument of 'argparse.ArgumentParser.add_argument'. Here's a short first proposal of doc. enhancement addressing [bpo-41024](). Automerge-Triggered-By: @csabella files: M Doc/library/argparse.rst diff --git a/Doc/library/argparse.rst b/Doc/library/argparse.rst index 5e0096cae73a7..0b64dfe47f768 100644 --- a/Doc/library/argparse.rst +++ b/Doc/library/argparse.rst @@ -1133,6 +1133,20 @@ container should match the type_ specified:: =20 Any container can be passed as the *choices* value, so :class:`list` objects, :class:`set` objects, and custom containers are all supported. +This includes :class:`enum.Enum`, which could be used to restrain +argument's choices; if we reuse previous rock/paper/scissors game example, +this could be as follows:: + + >>> from enum import Enum + >>> class GameMove(Enum): + ... 
ROCK =3D 'rock' + ... PAPER =3D 'paper' + ... SCISSORS =3D 'scissors' + ... + >>> parser =3D argparse.ArgumentParser(prog=3D'game.py') + >>> parser.add_argument('move', type=3DGameMove, choices=3DGameMove) + >>> parser.parse_args(['rock']) + Namespace(move=3D) =20 =20 required From webhook-mailer at python.org Wed Jun 24 06:47:22 2020 From: webhook-mailer at python.org (=?utf-8?q?J=C3=BCrgen?= Gmach) Date: Wed, 24 Jun 2020 10:47:22 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Fix typo in dataclasses module (GH-21109) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/80526f68411a9406a9067095fbf6a0f88047= cac5 commit: 80526f68411a9406a9067095fbf6a0f88047cac5 branch: master author: J=C3=BCrgen Gmach committer: GitHub date: 2020-06-24T03:46:52-07:00 summary: Fix typo in dataclasses module (GH-21109) Automerge-Triggered-By: @matrixise files: M Lib/dataclasses.py diff --git a/Lib/dataclasses.py b/Lib/dataclasses.py index fc69508354bbe..530d3e99574e8 100644 --- a/Lib/dataclasses.py +++ b/Lib/dataclasses.py @@ -1094,7 +1094,7 @@ def _asdict_inner(obj, dict_factory): # method, because: # - it does not recurse in to the namedtuple fields and # convert them to dicts (using dict_factory). - # - I don't actually want to return a dict here. The the main + # - I don't actually want to return a dict here. The main # use case here is json.dumps, and it handles converting # namedtuples to lists. Admittedly we're losing some # information here when we produce a json list instead of a From webhook-mailer at python.org Tue Jun 30 09:48:20 2020 From: webhook-mailer at python.org (=?utf-8?q?R=C3=A9mi?= Lapeyre) Date: Tue, 30 Jun 2020 13:48:20 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: bpo-39314: Closes parenthesis when autocompleting for functions that take no arguments (GH-20562) Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/bd4a3f21454a6012f4353e2255837561fc9f= 0e6a commit: bd4a3f21454a6012f4353e2255837561fc9f0e6a branch: master author: R=C3=A9mi Lapeyre committer: GitHub date: 2020-06-30T22:48:15+09:00 summary: bpo-39314: Closes parenthesis when autocompleting for functions that take no = arguments (GH-20562) files: A Misc/NEWS.d/next/Library/2020-06-01-02-16-29.bpo-39314.0T9hlA.rst M Lib/rlcompleter.py M Lib/test/test_rlcompleter.py diff --git a/Lib/rlcompleter.py b/Lib/rlcompleter.py index bca4a7bc5218a..c06388e8d9c2d 100644 --- a/Lib/rlcompleter.py +++ b/Lib/rlcompleter.py @@ -31,6 +31,7 @@ =20 import atexit import builtins +import inspect import __main__ =20 __all__ =3D ["Completer"] @@ -96,7 +97,13 @@ def complete(self, text, state): =20 def _callable_postfix(self, val, word): if callable(val): - word =3D word + "(" + word +=3D "(" + try: + if not inspect.signature(val).parameters: + word +=3D ")" + except ValueError: + pass + return word =20 def global_matches(self, text): diff --git a/Lib/test/test_rlcompleter.py b/Lib/test/test_rlcompleter.py index 0dc1080ca3209..ee3019d8782d1 100644 --- a/Lib/test/test_rlcompleter.py +++ b/Lib/test/test_rlcompleter.py @@ -40,12 +40,12 @@ def test_global_matches(self): =20 # test with a customized namespace self.assertEqual(self.completer.global_matches('CompleteM'), - ['CompleteMe(']) + ['CompleteMe()']) self.assertEqual(self.completer.global_matches('eg'), 
['egg(']) # XXX: see issue5256 self.assertEqual(self.completer.global_matches('CompleteM'), - ['CompleteMe(']) + ['CompleteMe()']) =20 def test_attr_matches(self): # test with builtins namespace @@ -64,7 +64,7 @@ def test_attr_matches(self): ['CompleteMe.spam']) self.assertEqual(self.completer.attr_matches('Completeme.egg'), []) self.assertEqual(self.completer.attr_matches('CompleteMe.'), - ['CompleteMe.mro(', 'CompleteMe.spam']) + ['CompleteMe.mro()', 'CompleteMe.spam']) self.assertEqual(self.completer.attr_matches('CompleteMe._'), ['CompleteMe._ham']) matches =3D self.completer.attr_matches('CompleteMe.__') @@ -134,7 +134,7 @@ def test_duplicate_globals(self): # No opening bracket "(" because we overrode the built-in class self.assertEqual(completer.complete('memoryview', 0), 'memoryview') self.assertIsNone(completer.complete('memoryview', 1)) - self.assertEqual(completer.complete('Ellipsis', 0), 'Ellipsis(') + self.assertEqual(completer.complete('Ellipsis', 0), 'Ellipsis()') self.assertIsNone(completer.complete('Ellipsis', 1)) =20 if __name__ =3D=3D '__main__': diff --git a/Misc/NEWS.d/next/Library/2020-06-01-02-16-29.bpo-39314.0T9hlA.rs= t b/Misc/NEWS.d/next/Library/2020-06-01-02-16-29.bpo-39314.0T9hlA.rst new file mode 100644 index 0000000000000..e805332efb626 --- /dev/null +++ b/Misc/NEWS.d/next/Library/2020-06-01-02-16-29.bpo-39314.0T9hlA.rst @@ -0,0 +1,3 @@ +:class:`rlcompleter.Completer` and the standard Python shell now close the +parenthesis for functions that take no arguments. Patch contributed by R=C3= =A9mi +Lapeyre. From webhook-mailer at python.org Tue Jun 30 12:05:03 2020 From: webhook-mailer at python.org (=?utf-8?q?=C5=81ukasz?= Langa) Date: Tue, 30 Jun 2020 16:05:03 -0000 Subject: [Python-checkins] (no subject) Message-ID: To: python-checkins at python.org Subject: Python 3.8.4rc1 Content-Type: text/plain; charset="utf-8" Content-Transfer-Encoding: quoted-printable MIME-Version: 1.0 https://github.com/python/cpython/commit/6c38841c08edd6b0727903ec3f1acd10dc97= 66f6 commit: 6c38841c08edd6b0727903ec3f1acd10dc9766f6 branch: 3.8 author: =C5=81ukasz Langa committer: =C5=81ukasz Langa date: 2020-06-30T00:30:11+02:00 summary: Python 3.8.4rc1 files: A Misc/NEWS.d/3.8.4rc1.rst D Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst D Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf.rst D Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK.rst D Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse.rst D Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst D Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst D Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst D Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst D Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst D Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst D Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst D Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst D 
Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst D Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst D Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst D Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst D Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst D Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst D Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst D Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst D Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst D Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst D Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst D Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst D Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst D Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst D Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst D Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst D Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst D Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst D Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst D Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst D Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst D Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst D Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst D Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst D Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst D Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst D Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst D Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst D Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst D Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst D Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst D Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst D Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst D Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst D Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst D Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst D Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst D Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst D Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst D Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst D Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst D Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst D Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst D Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst D Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst M Include/patchlevel.h M Lib/pydoc_data/topics.py M README.rst diff --git a/Include/patchlevel.h b/Include/patchlevel.h index e179252a35dce..ae867d4daf159 100644 --- a/Include/patchlevel.h +++ b/Include/patchlevel.h @@ -18,12 +18,12 @@ /*--start constants--*/ #define PY_MAJOR_VERSION 3 #define PY_MINOR_VERSION 8 -#define PY_MICRO_VERSION 3 -#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_FINAL -#define PY_RELEASE_SERIAL 0 +#define PY_MICRO_VERSION 4 +#define PY_RELEASE_LEVEL PY_RELEASE_LEVEL_GAMMA +#define 
PY_RELEASE_SERIAL 1 =20 /* Version as a string */ -#define PY_VERSION "3.8.3+" +#define PY_VERSION "3.8.4rc1" /*--end constants--*/ =20 /* Version as a single 4-byte hex number, e.g. 0x010502B2 =3D=3D 1.5.2b2. diff --git a/Lib/pydoc_data/topics.py b/Lib/pydoc_data/topics.py index 06f0e781772f8..0320964e5cf5f 100644 --- a/Lib/pydoc_data/topics.py +++ b/Lib/pydoc_data/topics.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -# Autogenerated by Sphinx on Wed May 13 19:29:27 2020 +# Autogenerated by Sphinx on Mon Jun 29 22:24:24 2020 topics =3D {'assert': 'The "assert" statement\n' '**********************\n' '\n' @@ -4284,7 +4284,8 @@ ' the current environment).\n' '\n' 'retval\n' - 'Print the return value for the last return of a function.\n' + '\n' + ' Print the return value for the last return of a function.\n' '\n' '-[ Footnotes ]-\n' '\n' @@ -6037,8 +6038,8 @@ '\n' 'A non-normative HTML file listing all valid identifier ' 'characters for\n' - 'Unicode 4.1 can be found at https://www.dcl.hpi.uni-\n' - 'potsdam.de/home/loewis/table-3131.html.\n' + 'Unicode 4.1 can be found at\n' + 'https://www.unicode.org/Public/13.0.0/ucd/DerivedCoreProper= ties.txt\n' '\n' '\n' 'Keywords\n' diff --git a/Misc/NEWS.d/3.8.4rc1.rst b/Misc/NEWS.d/3.8.4rc1.rst new file mode 100644 index 0000000000000..1f7cb9620040a --- /dev/null +++ b/Misc/NEWS.d/3.8.4rc1.rst @@ -0,0 +1,637 @@ +.. bpo: 41004 +.. date: 2020-06-29-16-02-29 +.. nonce: ovF0KZ +.. release date: 2020-06-29 +.. section: Security + +The __hash__() methods of ipaddress.IPv4Interface and +ipaddress.IPv6Interface incorrectly generated constant hash values of 32 and +128 respectively. This resulted in always causing hash collisions. The fix +uses hash() to generate hash values for the tuple of (address, mask length, +network address). + +.. + +.. bpo: 39073 +.. date: 2020-03-15-01-28-36 +.. nonce: 6Szd3i +.. section: Security + +Disallow CR or LF in email.headerregistry.Address arguments to guard against +header injection attacks. + +.. + +.. bpo: 41094 +.. date: 2020-06-23-23-26-42 +.. nonce: zEIJse +.. section: Core and Builtins + +Fix decoding errors with audit when open files with non-ASCII names on +non-UTF-8 locale. + +.. + +.. bpo: 41056 +.. date: 2020-06-21-19-53-33 +.. nonce: IDu_EK +.. section: Core and Builtins + +Fixes a reference to deallocated stack space during startup when +constructing sys.path involving a relative symlink when code was supplied +via -c. (discovered via Coverity) + +.. + +.. bpo: 35975 +.. date: 2020-06-20-17-21-07 +.. nonce: UDHCHp +.. section: Core and Builtins + +Stefan Behnel reported that cf_feature_version is used even when +PyCF_ONLY_AST is not set. This is against the intention and against the +documented behavior, so it's been fixed. + +.. + +.. bpo: 40957 +.. date: 2020-06-12-12-21-54 +.. nonce: Z8n6I6 +.. section: Core and Builtins + +Fix refleak in _Py_fopen_obj() when PySys_Audit() fails + +.. + +.. bpo: 40870 +.. date: 2020-06-05-12-48-28 +.. nonce: 9cd2sk +.. section: Core and Builtins + +Raise :exc:`ValueError` when validating custom AST's where the constants +``True``, ``False`` and ``None`` are used within a :class:`ast.Name` node. + +.. + +.. bpo: 40826 +.. date: 2020-06-01-20-31-07 +.. nonce: XCI4M2 +.. section: Core and Builtins + +Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception +and pass the Python thread state when checking if there is a pending signal. + +.. + +.. bpo: 40824 +.. date: 2020-05-30-14-37-18 +.. nonce: XR3V5s +.. 
section: Core and Builtins + +Unexpected errors in calling the ``__iter__`` method are no longer masked by +``TypeError`` in the :keyword:`in` operator and functions +:func:`~operator.contains`, :func:`~operator.indexOf` and +:func:`~operator.countOf` of the :mod:`operator` module. + +.. + +.. bpo: 40663 +.. date: 2020-05-17-20-38-12 +.. nonce: u2aiZf +.. section: Core and Builtins + +Correctly generate annotations where parentheses are omitted but required +(e.g: ``Type[(str, int, *other))]``. + +.. + +.. bpo: 41138 +.. date: 2020-06-27-13-51-36 +.. nonce: bIpf7g +.. section: Library + +Fixed the :mod:`trace` module CLI for Python source files with non-UTF-8 +encoding. + +.. + +.. bpo: 31938 +.. date: 2020-06-22-20-08-40 +.. nonce: EVuko9 +.. section: Library + +Fix default-value signatures of several functions in the :mod:`select` +module - by Anthony Sottile. + +.. + +.. bpo: 41068 +.. date: 2020-06-22-10-25-39 +.. nonce: _bX2BW +.. section: Library + +Fixed reading files with non-ASCII names from ZIP archive directly after +writing them. + +.. + +.. bpo: 41058 +.. date: 2020-06-20-21-03-55 +.. nonce: gztdZy +.. section: Library + +:func:`pdb.find_function` now correctly determines the source file encoding. + +.. + +.. bpo: 41056 +.. date: 2020-06-20-18-35-43 +.. nonce: Garcle +.. section: Library + +Fix a NULL pointer dereference within the ssl module during a MemoryError in +the keylog callback. (discovered by Coverity) + +.. + +.. bpo: 41048 +.. date: 2020-06-20-10-16-57 +.. nonce: hEXB-B +.. section: Library + +:func:`mimetypes.read_mime_types` function reads the rule file using UTF-8 +encoding, not the locale encoding. Patch by Srinivas Reddy Thatiparthy. + +.. + +.. bpo: 40448 +.. date: 2020-06-15-12-22-53 +.. nonce: 1dk8Bu +.. section: Library + +:mod:`ensurepip` now disables the use of `pip` cache when installing the +bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. + +.. + +.. bpo: 40855 +.. date: 2020-06-12-10-44-15 +.. nonce: jSot83 +.. section: Library + +The standard deviation and variance functions in the statistics module were +ignoring their mu and xbar arguments. + +.. + +.. bpo: 40807 +.. date: 2020-06-04-16-25-15 +.. nonce: yYyLWx +.. section: Library + +Stop codeop._maybe_compile, used by code.InteractiveInterpreter (and IDLE). +from from emitting each warning three times. + +.. + +.. bpo: 40834 +.. date: 2020-05-31-15-52-18 +.. nonce: MO9_hb +.. section: Library + +Fix truncate when sending str object with_xxsubinterpreters.channel_send. + +.. + +.. bpo: 38488 +.. date: 2020-05-28-16-51-00 +.. nonce: hFQNgA +.. section: Library + +Update ensurepip to install pip 20.1.1 and setuptools 47.1.0. + +.. + +.. bpo: 40767 +.. date: 2020-05-27-21-27-01 +.. nonce: L5MnVV +.. section: Library + +:mod:`webbrowser` now properly finds the default browser in pure Wayland +systems by checking the WAYLAND_DISPLAY environment variable. Patch +contributed by J=C3=A9r=C3=A9my Attali. + +.. + +.. bpo: 40795 +.. date: 2020-05-27-17-00-18 +.. nonce: eZSnHA +.. section: Library + +:mod:`ctypes` module: If ctypes fails to convert the result of a callback or +if a ctypes callback function raises an exception, sys.unraisablehook is now +called with an exception set. Previously, the error was logged into stderr +by :c:func:`PyErr_Print`. + +.. + +.. bpo: 30008 +.. date: 2020-05-25-22-18-38 +.. nonce: CKC3td +.. section: Library + +Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds that use +``no-deprecated`` and ``--api=3D1.1.0``. + +.. + +.. bpo: 40614 +.. 
date: 2020-05-18-22-41-02 +.. nonce: 8j3kmq +.. section: Library + +:func:`ast.parse` will not parse self documenting expressions in f-strings +when passed ``feature_version`` is less than ``(3, 8)``. + +.. + +.. bpo: 40626 +.. date: 2020-05-18-17-29-30 +.. nonce: NeZufF +.. section: Library + +Add h5 file extension as MIME Type application/x-hdf5, as per HDF Group +recommendation for HDF5 formatted data files. Patch contributed by Mark +Schwab. + +.. + +.. bpo: 25872 +.. date: 2020-05-14-13-25-36 +.. nonce: 5D5538 +.. section: Library + +:mod:`linecache` could crash with a :exc:`KeyError` when accessed from +multiple threads. Fix by Michael Graczyk. + +.. + +.. bpo: 40597 +.. date: 2020-05-11-19-17-23 +.. nonce: 4SGfgm +.. section: Library + +If text content lines are longer than policy.max_line_length, always use a +content-encoding to make sure they are wrapped. + +.. + +.. bpo: 40515 +.. date: 2020-05-06-13-51-19 +.. nonce: TUCvYB +.. section: Library + +The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is +build with thread support. Python 3.7.0 made thread support mandatory and no +longer works safely with a no-thread builds. + +.. + +.. bpo: 13097 +.. date: 2020-05-06-02-01-25 +.. nonce: Wh5xSK +.. section: Library + +``ctypes`` now raises an ``ArgumentError`` when a callback is invoked with +more than 1024 arguments. + +.. + +.. bpo: 40457 +.. date: 2020-05-02-17-17-37 +.. nonce: EXReI1 +.. section: Library + +The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. + +.. + +.. bpo: 39830 +.. date: 2020-03-23-05-21-13 +.. nonce: IkqU1Y +.. section: Library + +Add :class:`zipfile.Path` to ``__all__`` in the :mod:`zipfile` module. + +.. + +.. bpo: 40025 +.. date: 2020-03-21-05-26-38 +.. nonce: DTLtyq +.. section: Library + +Raise TypeError when _generate_next_value_ is defined after members. Patch +by Ethan Onstott. + +.. + +.. bpo: 39244 +.. date: 2020-02-23-15-09-47 +.. nonce: aBK5IM +.. section: Library + +Fixed :class:`multiprocessing.context.get_all_start_methods` to properly +return the default method first on macOS. + +.. + +.. bpo: 39040 +.. date: 2019-12-15-18-47-20 +.. nonce: tKa0Qs +.. section: Library + +Fix parsing of invalid mime headers parameters by collapsing whitespace +between encoded words in a bare-quote-string. + +.. + +.. bpo: 35714 +.. date: 2019-10-25-23-45-49 +.. nonce: fw3xb7 +.. section: Library + +:exc:`struct.error` is now raised if there is a null character in a +:mod:`struct` format string. + +.. + +.. bpo: 36290 +.. date: 2019-03-17-19-01-53 +.. nonce: 7VXo_K +.. section: Library + +AST nodes are now raising :exc:`TypeError` on conflicting keyword arguments. +Patch contributed by R=C3=A9mi Lapeyre. + +.. + +.. bpo: 29620 +.. date: 2018-08-21-16-20-33 +.. nonce: xxx666 +.. section: Library + +:func:`~unittest.TestCase.assertWarns` no longer raises a +``RuntimeException`` when accessing a module's ``__warningregistry__`` +causes importation of a new module, or when a new module is imported in +another thread. Patch by Kernc. + +.. + +.. bpo: 34226 +.. date: 2018-07-29-12-14-54 +.. nonce: BE7zbu +.. section: Library + +Fix `cgi.parse_multipart` without content_length. Patch by Roger Duran + +.. + +.. bpo: 41085 +.. date: 2020-06-23-12-02-45 +.. nonce: JZKsyz +.. section: Tests + +Fix integer overflow in the :meth:`array.array.index` method on 64-bit +Windows for index larger than ``2**31``. + +.. + +.. bpo: 38377 +.. date: 2020-06-17-18-00-21 +.. nonce: jfg4TH +.. 
section: Tests + +On Linux, skip tests using multiprocessing if the current user cannot create +a file in ``/dev/shm/`` directory. Add the +:func:`~test.support.skip_if_broken_multiprocessing_synchronize` function to +the :mod:`test.support` module. + +.. + +.. bpo: 41009 +.. date: 2020-06-17-17-27-07 +.. nonce: Rvn6OQ +.. section: Tests + +Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as +class decorator. + +.. + +.. bpo: 41003 +.. date: 2020-06-17-15-07-14 +.. nonce: tiH_Fy +.. section: Tests + +Fix ``test_copyreg`` when ``numpy`` is installed: ``test.pickletester`` now +saves/restores warnings filters when importing ``numpy``, to ignore filters +installed by ``numpy``. + +.. + +.. bpo: 40964 +.. date: 2020-06-12-20-46-23 +.. nonce: OBzf2c +.. section: Tests + +Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu is blocking +incoming connections. + +.. + +.. bpo: 40055 +.. date: 2020-05-15-01-21-44 +.. nonce: Xp4aP9 +.. section: Tests + +distutils.tests now saves/restores warnings filters to leave them unchanged. +Importing tests imports docutils which imports pkg_resources which adds a +warnings filter. + +.. + +.. bpo: 34401 +.. date: 2018-08-20-09-38-52 +.. nonce: eGxMPm +.. section: Tests + +Make test_gdb properly run on HP-UX. Patch by Michael Osipov. + +.. + +.. bpo: 40204 +.. date: 2020-06-25-06-59-13 +.. nonce: GpD04D +.. section: Build + +Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. + +.. + +.. bpo: 40653 +.. date: 2020-05-17-03-33-00 +.. nonce: WI8UGn +.. section: Build + +Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. + +.. + +.. bpo: 41074 +.. date: 2020-06-24-21-30-42 +.. nonce: gaQc3C +.. section: Windows + +Fixed support of non-ASCII names in functions :func:`msilib.OpenDatabase` +and :func:`msilib.init_database` and non-ASCII SQL in method +:meth:`msilib.Database.OpenView`. + +.. + +.. bpo: 40164 +.. date: 2020-06-12-13-13-44 +.. nonce: SPrSn5 +.. section: Windows + +Updates Windows OpenSSL to 1.1.1g + +.. + +.. bpo: 39631 +.. date: 2020-05-19-14-43-33 +.. nonce: Z5yXam +.. section: Windows + +Changes the registered MIME type for ``.py`` files on Windows to +``text/x-python`` instead of ``text/plain``. + +.. + +.. bpo: 40677 +.. date: 2020-05-19-04-11-12 +.. nonce: qQbLW8 +.. section: Windows + +Manually define IO_REPARSE_TAG_APPEXECLINK in case some old Windows SDK +doesn't have it. + +.. + +.. bpo: 40650 +.. date: 2020-05-17-00-08-13 +.. nonce: 4euMtU +.. section: Windows + +Include winsock2.h in pytime.c for timeval. + +.. + +.. bpo: 39148 +.. date: 2020-03-23-19-07-55 +.. nonce: W1YJEb +.. section: Windows + +Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. +Change the raised exception for unknown address families to ValueError as +it's not coming from Windows API. + +.. + +.. bpo: 39580 +.. date: 2020-06-25-06-09-00 +.. nonce: N_vJ9h +.. section: macOS + +Avoid opening Finder window if running installer from the command line. +Patch contributed by Rick Heil. + +.. + +.. bpo: 41100 +.. date: 2020-06-24-13-51-57 +.. nonce: mcHdc5 +.. section: macOS + +Fix configure error when building on macOS 11. Note that the current Python +release was released shortly after the first developer preview of macOS 11 +(Big Sur); there are other known issues with building and running on the +developer preview. Big Sur is expected to be fully supported in a future +bugfix release of Python 3.8.x and with 3.9.0. + +.. + +.. bpo: 41005 +.. date: 2020-06-17-13-45-15 +.. nonce: zZegdV +.. 
section: macOS + +fixed an XDG settings issue not allowing macos to open browser in +webbrowser.py + +.. + +.. bpo: 40741 +.. date: 2020-06-07-20-10-56 +.. nonce: 80A2BW +.. section: macOS + +Update macOS installer to use SQLite 3.32.2. + +.. + +.. bpo: 41144 +.. date: 2020-06-27-17-02-00 +.. nonce: JoFGIX +.. section: IDLE + +Make Open Module open a special module such as os.path. + +.. + +.. bpo: 39885 +.. date: 2020-05-29-18-21-58 +.. nonce: zB_-bN +.. section: IDLE + +Make context menu Cut and Copy work again when right-clicking within a +selection. + +.. + +.. bpo: 40723 +.. date: 2020-05-24-06-19-43 +.. nonce: AJLd4U +.. section: IDLE + +Make test_idle pass when run after import. + +.. + +.. bpo: 40479 +.. date: 2020-05-15-17-48-25 +.. nonce: B1gBl- +.. section: Tools/Demos + +Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, +and 3.0.0-alpha. + +.. + +.. bpo: 40163 +.. date: 2020-04-03-08-32-31 +.. nonce: lX8K4B +.. section: Tools/Demos + +Fix multissltest tool. OpenSSL has changed download URL for old releases. +The multissltest tool now tries to download from current and old download +URLs. diff --git a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst = b/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst deleted file mode 100644 index 1e6c5cb32b722..0000000000000 --- a/Misc/NEWS.d/next/Build/2020-05-17-03-33-00.bpo-40653.WI8UGn.rst +++ /dev/null @@ -1 +0,0 @@ -Move _dirnameW out of HAVE_SYMLINK to fix a potential compiling issue. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst = b/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst deleted file mode 100644 index 25a6d751e5f45..0000000000000 --- a/Misc/NEWS.d/next/Build/2020-06-25-06-59-13.bpo-40204.GpD04D.rst +++ /dev/null @@ -1 +0,0 @@ -Pin Sphinx version to 2.3.1 in ``Doc/Makefile``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663= .u2aiZf.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-4066= 3.u2aiZf.rst deleted file mode 100644 index 5041abc7e3eaa..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-17-20-38-12.bpo-40663.u2aiZf= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Correctly generate annotations where parentheses are omitted but required -(e.g: ``Type[(str, int, *other))]``. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824= .XR3V5s.rst b/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-4082= 4.XR3V5s.rst deleted file mode 100644 index 73c593c04a0da..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-05-30-14-37-18.bpo-40824.XR3V5s= .rst=09 +++ /dev/null @@ -1,4 +0,0 @@ -Unexpected errors in calling the ``__iter__`` method are no longer masked by -``TypeError`` in the :keyword:`in` operator and functions -:func:`~operator.contains`, :func:`~operator.indexOf` and -:func:`~operator.countOf` of the :mod:`operator` module. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826= .XCI4M2.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-4082= 6.XCI4M2.rst deleted file mode 100644 index a03ed180eb952..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-01-20-31-07.bpo-40826.XCI4M2= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fix GIL usage in :c:func:`PyOS_Readline`: lock the GIL to set an exception -and pass the Python thread state when checking if there is a pending signal. 
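As a rough illustration of the bpo-40824 entry removed just above (the class name is invented for the example): an unexpected exception raised while obtaining an iterator now propagates from the ``in`` operator instead of being swallowed into a generic :exc:`TypeError`.

    class Flaky:
        def __iter__(self):
            # Before the fix, a non-TypeError raised here was masked by
            # "argument of type 'Flaky' is not iterable".
            raise OSError("could not open the underlying resource")

    try:
        1 in Flaky()
    except OSError as exc:
        print("propagated unchanged:", exc)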
diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870= .9cd2sk.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-4087= 0.9cd2sk.rst deleted file mode 100644 index 8e943a29f337f..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-05-12-48-28.bpo-40870.9cd2sk= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Raise :exc:`ValueError` when validating custom AST's where the constants -``True``, ``False`` and ``None`` are used within a :class:`ast.Name` node. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957= .Z8n6I6.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-4095= 7.Z8n6I6.rst deleted file mode 100644 index f99c374f94aac..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-12-12-21-54.bpo-40957.Z8n6I6= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Fix refleak in _Py_fopen_obj() when PySys_Audit() fails diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975= .UDHCHp.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-3597= 5.UDHCHp.rst deleted file mode 100644 index 73f4a6da2e5c0..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-20-17-21-07.bpo-35975.UDHCHp= .rst=09 +++ /dev/null @@ -1,3 +0,0 @@ -Stefan Behnel reported that cf_feature_version is used even when -PyCF_ONLY_AST is not set. This is against the intention and against the -documented behavior, so it's been fixed. diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056= .IDu_EK.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-4105= 6.IDu_EK.rst deleted file mode 100644 index 25f93c9da3105..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-21-19-53-33.bpo-41056.IDu_EK= .rst=09 +++ /dev/null @@ -1 +0,0 @@ -Fixes a reference to deallocated stack space during startup when constructin= g sys.path involving a relative symlink when code was supplied via -c. (disc= overed via Coverity) \ No newline at end of file diff --git a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094= .zEIJse.rst b/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-4109= 4.zEIJse.rst deleted file mode 100644 index 6dd45e21d1758..0000000000000 --- a/Misc/NEWS.d/next/Core and Builtins/2020-06-23-23-26-42.bpo-41094.zEIJse= .rst=09 +++ /dev/null @@ -1,2 +0,0 @@ -Fix decoding errors with audit when open files with non-ASCII names on non-U= TF-8 -locale. diff --git a/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst b= /Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst deleted file mode 100644 index e0de2f9d83668..0000000000000 --- a/Misc/NEWS.d/next/IDLE/2020-05-24-06-19-43.bpo-40723.AJLd4U.rst +++ /dev/null @@ -1 +0,0 @@ -Make test_idle pass when run after import. diff --git a/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst b= /Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst deleted file mode 100644 index a847b75997117..0000000000000 --- a/Misc/NEWS.d/next/IDLE/2020-05-29-18-21-58.bpo-39885.zB_-bN.rst +++ /dev/null @@ -1,2 +0,0 @@ -Make context menu Cut and Copy work again when right-clicking within a -selection. diff --git a/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst b= /Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst deleted file mode 100644 index ed558d3e7ded1..0000000000000 --- a/Misc/NEWS.d/next/IDLE/2020-06-27-17-02-00.bpo-41144.JoFGIX.rst +++ /dev/null @@ -1 +0,0 @@ -Make Open Module open a special module such as os.path. 
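To make the bpo-40870 entry above concrete, here is a small hand-written sketch (not taken from the test suite) of the new validation: smuggling the constant ``True`` into an :class:`ast.Name` node of a custom AST is rejected with :exc:`ValueError` at compile time.

    import ast

    tree = ast.Expression(body=ast.Name(id="True", ctx=ast.Load()))
    ast.fix_missing_locations(tree)
    try:
        compile(tree, "<synthetic>", "eval")
    except ValueError as exc:
        print(exc)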
diff --git a/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rs= t b/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst deleted file mode 100644 index 2656b4bf22ae4..0000000000000 --- a/Misc/NEWS.d/next/Library/2018-07-29-12-14-54.bpo-34226.BE7zbu.rst +++ /dev/null @@ -1 +0,0 @@ -Fix `cgi.parse_multipart` without content_length. Patch by Roger Duran diff --git a/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rs= t b/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst deleted file mode 100644 index d781919504e68..0000000000000 --- a/Misc/NEWS.d/next/Library/2018-08-21-16-20-33.bpo-29620.xxx666.rst +++ /dev/null @@ -1,3 +0,0 @@ -:func:`~unittest.TestCase.assertWarns` no longer raises a ``RuntimeException= `` -when accessing a module's ``__warningregistry__`` causes importation of a new -module, or when a new module is imported in another thread. Patch by Kernc. diff --git a/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rs= t b/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst deleted file mode 100644 index a9afe62b0c46e..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-03-17-19-01-53.bpo-36290.7VXo_K.rst +++ /dev/null @@ -1,2 +0,0 @@ -AST nodes are now raising :exc:`TypeError` on conflicting keyword arguments. -Patch contributed by R=C3=A9mi Lapeyre. diff --git a/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rs= t b/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst deleted file mode 100644 index 39102065ca7b5..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-10-25-23-45-49.bpo-35714.fw3xb7.rst +++ /dev/null @@ -1,2 +0,0 @@ -:exc:`struct.error` is now raised if there is a null character in a -:mod:`struct` format string. diff --git a/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rs= t b/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst deleted file mode 100644 index 078bce22be30f..0000000000000 --- a/Misc/NEWS.d/next/Library/2019-12-15-18-47-20.bpo-39040.tKa0Qs.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix parsing of invalid mime headers parameters by collapsing whitespace betw= een -encoded words in a bare-quote-string. diff --git a/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rs= t b/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst deleted file mode 100644 index c7d8e0de676b5..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-02-23-15-09-47.bpo-39244.aBK5IM.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed :class:`multiprocessing.context.get_all_start_methods` -to properly return the default method first on macOS. diff --git a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rs= t b/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst deleted file mode 100644 index 7b699de4e0726..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-03-21-05-26-38.bpo-40025.DTLtyq.rst +++ /dev/null @@ -1 +0,0 @@ -Raise TypeError when _generate_next_value_ is defined after members. Patch b= y Ethan Onstott. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rs= t b/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst deleted file mode 100644 index fc9c650cc39f2..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-03-23-05-21-13.bpo-39830.IkqU1Y.rst +++ /dev/null @@ -1 +0,0 @@ -Add :class:`zipfile.Path` to ``__all__`` in the :mod:`zipfile` module. 
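A short illustrative sketch of the bpo-35714 entry above (the format string is arbitrary): a NUL byte embedded in a :mod:`struct` format is now reported as :exc:`struct.error` rather than being mishandled.

    import struct

    try:
        struct.calcsize("ii\x00i")   # embedded NUL in the format string
    except struct.error as exc:
        print(exc)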
diff --git a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rs= t b/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst deleted file mode 100644 index 19b6dd685cd8c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-02-17-17-37.bpo-40457.EXReI1.rst +++ /dev/null @@ -1 +0,0 @@ -The ssl module now support OpenSSL builds without TLS 1.0 and 1.1 methods. diff --git a/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rs= t b/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst deleted file mode 100644 index a7f5f58828917..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-06-02-01-25.bpo-13097.Wh5xSK.rst +++ /dev/null @@ -1 +0,0 @@ -``ctypes`` now raises an ``ArgumentError`` when a callback is invoked with m= ore than 1024 arguments. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rs= t b/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst deleted file mode 100644 index af77a57fe7237..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-06-13-51-19.bpo-40515.TUCvYB.rst +++ /dev/null @@ -1,3 +0,0 @@ -The :mod:`ssl` and :mod:`hashlib` modules now actively check that OpenSSL is -build with thread support. Python 3.7.0 made thread support mandatory and no -longer works safely with a no-thread builds. diff --git a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rs= t b/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst deleted file mode 100644 index 1b9fe609c25b7..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-11-19-17-23.bpo-40597.4SGfgm.rst +++ /dev/null @@ -1 +0,0 @@ -If text content lines are longer than policy.max_line_length, always use a c= ontent-encoding to make sure they are wrapped. diff --git a/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rs= t b/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst deleted file mode 100644 index 3fd8bac73edbe..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-14-13-25-36.bpo-25872.5D5538.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`linecache` could crash with a :exc:`KeyError` when accessed from multi= ple threads. -Fix by Michael Graczyk. diff --git a/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rs= t b/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst deleted file mode 100644 index fe652cd7ee39d..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-18-17-29-30.bpo-40626.NeZufF.rst +++ /dev/null @@ -1 +0,0 @@ -Add h5 file extension as MIME Type application/x-hdf5, as per HDF Group reco= mmendation for HDF5 formatted data files. Patch contributed by Mark Schwab. diff --git a/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rs= t b/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst deleted file mode 100644 index 238b98c14a326..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-18-22-41-02.bpo-40614.8j3kmq.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`ast.parse` will not parse self documenting expressions in f-strings w= hen passed ``feature_version`` is less than ``(3, 8)``. diff --git a/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rs= t b/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst deleted file mode 100644 index c4cfa56ce02c5..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-25-22-18-38.bpo-30008.CKC3td.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix :mod:`ssl` code to be compatible with OpenSSL 1.1.x builds that use -``no-deprecated`` and ``--api=3D1.1.0``. 
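For the bpo-40614 entry above, a minimal sketch (the variable name is invented) of how ``feature_version`` now gates self-documenting f-string expressions in :func:`ast.parse`:

    import ast

    ast.parse('f"{answer=}"')                     # accepted under the current grammar
    try:
        ast.parse('f"{answer=}"', feature_version=(3, 7))
    except SyntaxError as exc:
        print("rejected for 3.7:", exc)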
diff --git a/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rs= t b/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst deleted file mode 100644 index dd02fb05cab5e..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-27-17-00-18.bpo-40795.eZSnHA.rst +++ /dev/null @@ -1,4 +0,0 @@ -:mod:`ctypes` module: If ctypes fails to convert the result of a callback or -if a ctypes callback function raises an exception, sys.unraisablehook is now -called with an exception set. Previously, the error was logged into stderr -by :c:func:`PyErr_Print`. diff --git a/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rs= t b/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst deleted file mode 100644 index 4bebb311b4d54..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-27-21-27-01.bpo-40767.L5MnVV.rst +++ /dev/null @@ -1,3 +0,0 @@ -:mod:`webbrowser` now properly finds the default browser in pure Wayland -systems by checking the WAYLAND_DISPLAY environment variable. Patch -contributed by J=C3=A9r=C3=A9my Attali. diff --git a/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rs= t b/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst deleted file mode 100644 index c44da9fecb605..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-28-16-51-00.bpo-38488.hFQNgA.rst +++ /dev/null @@ -1 +0,0 @@ -Update ensurepip to install pip 20.1.1 and setuptools 47.1.0. diff --git a/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rs= t b/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst deleted file mode 100644 index 272783773ff94..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-05-31-15-52-18.bpo-40834.MO9_hb.rst +++ /dev/null @@ -1 +0,0 @@ -Fix truncate when sending str object with_xxsubinterpreters.channel_send. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rs= t b/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst deleted file mode 100644 index 532b809b77eed..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-04-16-25-15.bpo-40807.yYyLWx.rst +++ /dev/null @@ -1,2 +0,0 @@ -Stop codeop._maybe_compile, used by code.InteractiveInterpreter (and IDLE). -from from emitting each warning three times. diff --git a/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rs= t b/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst deleted file mode 100644 index 201d510327a47..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-12-10-44-15.bpo-40855.jSot83.rst +++ /dev/null @@ -1,2 +0,0 @@ -The standard deviation and variance functions in the statistics module were -ignoring their mu and xbar arguments. diff --git a/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rs= t b/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst deleted file mode 100644 index a755c5faa671c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-15-12-22-53.bpo-40448.1dk8Bu.rst +++ /dev/null @@ -1,2 +0,0 @@ -:mod:`ensurepip` now disables the use of `pip` cache when installing the -bundled versions of `pip` and `setuptools`. Patch by Krzysztof Konopko. 
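To illustrate the bpo-40855 entry above (sample data chosen arbitrarily): a precomputed mean passed as ``xbar``/``mu`` is now honoured by the :mod:`statistics` spread functions instead of being recomputed from the data and ignored.

    import statistics

    data = [2.0, 4.0, 6.0]
    print(statistics.variance(data, xbar=4.0))    # sample variance about the given mean
    print(statistics.pvariance(data, mu=4.0))     # population variance about the given mean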
diff --git a/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rs= t b/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst deleted file mode 100644 index 2595900137d69..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-20-10-16-57.bpo-41048.hEXB-B.rst +++ /dev/null @@ -1,2 +0,0 @@ -:func:`mimetypes.read_mime_types` function reads the rule file using UTF-8 e= ncoding, not the locale encoding. -Patch by Srinivas Reddy Thatiparthy. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rs= t b/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst deleted file mode 100644 index 1776f0d1cf8a3..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-20-18-35-43.bpo-41056.Garcle.rst +++ /dev/null @@ -1 +0,0 @@ -Fix a NULL pointer dereference within the ssl module during a MemoryError in= the keylog callback. (discovered by Coverity) \ No newline at end of file diff --git a/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rs= t b/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst deleted file mode 100644 index 6ac90098aa52b..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-20-21-03-55.bpo-41058.gztdZy.rst +++ /dev/null @@ -1 +0,0 @@ -:func:`pdb.find_function` now correctly determines the source file encoding. diff --git a/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rs= t b/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst deleted file mode 100644 index 20580c7886fac..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-22-10-25-39.bpo-41068._bX2BW.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed reading files with non-ASCII names from ZIP archive directly after -writing them. diff --git a/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rs= t b/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst deleted file mode 100644 index 0488e94d42e8c..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-22-20-08-40.bpo-31938.EVuko9.rst +++ /dev/null @@ -1 +0,0 @@ -Fix default-value signatures of several functions in the :mod:`select` modul= e - by Anthony Sottile. diff --git a/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rs= t b/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst deleted file mode 100644 index 839d430e89b66..0000000000000 --- a/Misc/NEWS.d/next/Library/2020-06-27-13-51-36.bpo-41138.bIpf7g.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fixed the :mod:`trace` module CLI for Python source files with non-UTF-8 -encoding. diff --git a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.r= st b/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst deleted file mode 100644 index 6c9447b897bf6..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-03-15-01-28-36.bpo-39073.6Szd3i.rst +++ /dev/null @@ -1 +0,0 @@ -Disallow CR or LF in email.headerregistry.Address arguments to guard against= header injection attacks. diff --git a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.r= st b/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst deleted file mode 100644 index 1380b31fbe9f4..0000000000000 --- a/Misc/NEWS.d/next/Security/2020-06-29-16-02-29.bpo-41004.ovF0KZ.rst +++ /dev/null @@ -1 +0,0 @@ -The __hash__() methods of ipaddress.IPv4Interface and ipaddress.IPv6Interfa= ce incorrectly generated constant hash values of 32 and 128 respectively. Thi= s resulted in always causing hash collisions. 
The fix uses hash() to generate= hash values for the tuple of (address, mask length, network address). diff --git a/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst = b/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst deleted file mode 100644 index 1b28d94c056d4..0000000000000 --- a/Misc/NEWS.d/next/Tests/2018-08-20-09-38-52.bpo-34401.eGxMPm.rst +++ /dev/null @@ -1 +0,0 @@ -Make test_gdb properly run on HP-UX. Patch by Michael Osipov. diff --git a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst = b/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst deleted file mode 100644 index edb01182c3a5c..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-05-15-01-21-44.bpo-40055.Xp4aP9.rst +++ /dev/null @@ -1,3 +0,0 @@ -distutils.tests now saves/restores warnings filters to leave them unchanged. -Importing tests imports docutils which imports pkg_resources which adds a -warnings filter. diff --git a/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst = b/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst deleted file mode 100644 index abfe4f0da4351..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-12-20-46-23.bpo-40964.OBzf2c.rst +++ /dev/null @@ -1,2 +0,0 @@ -Disable remote :mod:`imaplib` tests, host cyrus.andrew.cmu.edu is blocking -incoming connections. diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst = b/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst deleted file mode 100644 index 6f908d99feaf7..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-17-15-07-14.bpo-41003.tiH_Fy.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix ``test_copyreg`` when ``numpy`` is installed: ``test.pickletester`` now -saves/restores warnings filters when importing ``numpy``, to ignore filters -installed by ``numpy``. diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst = b/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst deleted file mode 100644 index 1208c119a3556..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-17-17-27-07.bpo-41009.Rvn6OQ.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix use of ``support.require_{linux|mac|freebsd}_version()`` decorators as -class decorator. diff --git a/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst = b/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst deleted file mode 100644 index 11a30761d36c9..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-17-18-00-21.bpo-38377.jfg4TH.rst +++ /dev/null @@ -1,4 +0,0 @@ -On Linux, skip tests using multiprocessing if the current user cannot create -a file in ``/dev/shm/`` directory. Add the -:func:`~test.support.skip_if_broken_multiprocessing_synchronize` function to -the :mod:`test.support` module. diff --git a/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst = b/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst deleted file mode 100644 index 463dffdd653ee..0000000000000 --- a/Misc/NEWS.d/next/Tests/2020-06-23-12-02-45.bpo-41085.JZKsyz.rst +++ /dev/null @@ -1,2 +0,0 @@ -Fix integer overflow in the :meth:`array.array.index` method on 64-bit Windo= ws -for index larger than ``2**31``. diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4= B.rst b/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst deleted file mode 100644 index fc0a22a0a953e..0000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2020-04-03-08-32-31.bpo-40163.lX8K4B.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fix multissltest tool. 
OpenSSL has changed download URL for old releases. -The multissltest tool now tries to download from current and old download -URLs. diff --git a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl= -.rst b/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst deleted file mode 100644 index b59035971d7b0..0000000000000 --- a/Misc/NEWS.d/next/Tools-Demos/2020-05-15-17-48-25.bpo-40479.B1gBl-.rst +++ /dev/null @@ -1,2 +0,0 @@ -Update multissltest helper to test with latest OpenSSL 1.0.2, 1.1.0, 1.1.1, -and 3.0.0-alpha. diff --git a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rs= t b/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst deleted file mode 100644 index 7c70dce1e7333..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-03-23-19-07-55.bpo-39148.W1YJEb.rst +++ /dev/null @@ -1,3 +0,0 @@ -Add IPv6 support to :mod:`asyncio` datagram endpoints in ProactorEventLoop. -Change the raised exception for unknown address families to ValueError -as it's not coming from Windows API. diff --git a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rs= t b/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst deleted file mode 100644 index db13e58b14a79..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-17-00-08-13.bpo-40650.4euMtU.rst +++ /dev/null @@ -1 +0,0 @@ -Include winsock2.h in pytime.c for timeval. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rs= t b/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst deleted file mode 100644 index a09cb243aba31..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-19-04-11-12.bpo-40677.qQbLW8.rst +++ /dev/null @@ -1 +0,0 @@ -Manually define IO_REPARSE_TAG_APPEXECLINK in case some old Windows SDK does= n't have it. \ No newline at end of file diff --git a/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rs= t b/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst deleted file mode 100644 index 38db4b431b6af..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-05-19-14-43-33.bpo-39631.Z5yXam.rst +++ /dev/null @@ -1,2 +0,0 @@ -Changes the registered MIME type for ``.py`` files on Windows to -``text/x-python`` instead of ``text/plain``. diff --git a/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rs= t b/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst deleted file mode 100644 index 6390de717d71f..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-06-12-13-13-44.bpo-40164.SPrSn5.rst +++ /dev/null @@ -1 +0,0 @@ -Updates Windows OpenSSL to 1.1.1g \ No newline at end of file diff --git a/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rs= t b/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst deleted file mode 100644 index ec91fd361c3de..0000000000000 --- a/Misc/NEWS.d/next/Windows/2020-06-24-21-30-42.bpo-41074.gaQc3C.rst +++ /dev/null @@ -1,3 +0,0 @@ -Fixed support of non-ASCII names in functions :func:`msilib.OpenDatabase` -and :func:`msilib.init_database` and non-ASCII SQL in method -:meth:`msilib.Database.OpenView`. diff --git a/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst = b/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst deleted file mode 100644 index 6ff7b9a805b95..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-06-07-20-10-56.bpo-40741.80A2BW.rst +++ /dev/null @@ -1 +0,0 @@ -Update macOS installer to use SQLite 3.32.2. 
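Relating to the bpo-39148 entry a little earlier in this changelog, a minimal sketch (address and port are placeholders; the patch internals are not shown) of the kind of IPv6 datagram endpoint that now also works under ProactorEventLoop:

    import asyncio

    class Echo(asyncio.DatagramProtocol):
        def datagram_received(self, data, addr):
            print("received from", addr, data)

    async def main():
        loop = asyncio.get_running_loop()
        transport, _ = await loop.create_datagram_endpoint(
            Echo, local_addr=("::1", 8888))
        transport.close()

    asyncio.run(main())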
diff --git a/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst = b/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst deleted file mode 100644 index 3b5f3f23a12f5..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-06-17-13-45-15.bpo-41005.zZegdV.rst +++ /dev/null @@ -1 +0,0 @@ -fixed an XDG settings issue not allowing macos to open browser in webbrowser= .py \ No newline at end of file diff --git a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst = b/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst deleted file mode 100644 index d6bb616136690..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-06-24-13-51-57.bpo-41100.mcHdc5.rst +++ /dev/null @@ -1,7 +0,0 @@ -Fix configure error when building on macOS 11. -Note that the current Python release was released -shortly after the first developer preview of macOS -11 (Big Sur); there are other known issues with -building and running on the developer preview. -Big Sur is expected to be fully supported in a -future bugfix release of Python 3.8.x and with 3.9.0. \ No newline at end of file diff --git a/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst = b/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst deleted file mode 100644 index 95d65359804d0..0000000000000 --- a/Misc/NEWS.d/next/macOS/2020-06-25-06-09-00.bpo-39580.N_vJ9h.rst +++ /dev/null @@ -1,2 +0,0 @@ -Avoid opening Finder window if running installer from the command line. -Patch contributed by Rick Heil. diff --git a/README.rst b/README.rst index ae71b671111de..a64d80f4c8e3d 100644 --- a/README.rst +++ b/README.rst @@ -1,5 +1,5 @@ -This is Python version 3.8.3 -=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D= =3D=3D=3D +This is Python version 3.8.4rc1 +=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D=3D= =3D=3D=3D=3D=3D=3D =20 .. image:: https://travis-ci.org/python/cpython.svg?branch=3D3.8 :alt: CPython build status on Travis CI
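As a closing aside on the Include/patchlevel.h hunk earlier in this release commit: those constants are also folded into the single 4-byte hex form (``0x010502B2 == 1.5.2b2`` in the header's own example). A small, hypothetical helper mirroring that layout, assuming the usual nibble codes 0xA/0xB/0xC/0xF for alpha/beta/candidate (GAMMA)/final:

    LEVELS = {"alpha": 0xA, "beta": 0xB, "candidate": 0xC, "final": 0xF}

    def version_hex(major, minor, micro, level, serial):
        # Same packing as PY_VERSION_HEX: one byte each for major, minor and
        # micro, then a release-level nibble and a serial nibble.
        return (major << 24) | (minor << 16) | (micro << 8) | (LEVELS[level] << 4) | serial

    print(hex(version_hex(3, 8, 4, "candidate", 1)))   # 0x30804c1 -> 3.8.4rc1
    print(hex(version_hex(1, 5, 2, "beta", 2)))        # 0x10502b2 -> 1.5.2b2, the header's example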